Nov 25 10:31:41 crc systemd[1]: Starting Kubernetes Kubelet...
Nov 25 10:31:41 crc restorecon[4686]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11
Nov 25 10:31:41 crc restorecon[4686]:
/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc 
restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Nov 25 10:31:41 crc 
restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:41 crc restorecon[4686]: 
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset 
as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:41 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:41 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to
system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 
10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Nov 25 10:31:42 crc 
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]:
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odigos-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/open-liberty-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftartifactoryha-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshiftxray-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/operator-certification-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pmem-csi-operator-os/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-component-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/runtime-fabric-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sanstoragecsi-operator-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/smilecdr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sriov-fec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-commons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stackable-zookeeper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-tsc-client-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tawon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tigera-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-secrets-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/webotx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/63709497 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/d966b7fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-utilities/f5773757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/81c9edb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/57bf57ee not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/extract-content/86f5e6aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/0aabe31d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/d2af85c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/containers/registry-server/09d157d9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-acmpca-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigateway-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-apigatewayv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-applicationautoscaling-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-athena-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudfront-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudtrail-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatch-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-cloudwatchlogs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-documentdb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-dynamodb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ec2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecr-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ecs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-efs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eks-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elasticache-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-elbv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-emrcontainers-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-eventbridge-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-iam-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kafka-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-keyspaces-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kinesis-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-kms-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-lambda-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-memorydb-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-mq-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-networkfirewall-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-opensearchservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-organizations-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-pipes-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-prometheusservice-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-rds-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-recyclebin-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-route53resolver-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-s3-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sagemaker-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-secretsmanager-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ses-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sfn-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sns-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-sqs-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-ssm-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ack-wafv2-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airflow-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloydb-omni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alvearie-imaging-ingestion/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amd-gpu-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/analytics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/annotationlab/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-api-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apimatic-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/application-services-metering-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/argocd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/assisted-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/automotive-infra/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-efs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/awss3-operator-registry/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/azure-service-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/beegfs-csi-driver-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/camel-karavan-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cert-utils-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-aas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-impairment-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/codeflare-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-trivy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/community-windows-machine-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/customized-user-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cxl-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dapr-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datatrucker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dbaas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/debezium-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/deployment-validation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devopsinabox/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 
10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc 
restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c133,c223 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: 
/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0 Nov 25 10:31:42 crc restorecon[4686]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0 Nov 25 10:31:43 crc kubenswrapper[4702]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 25 10:31:43 crc kubenswrapper[4702]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version. Nov 25 10:31:43 crc kubenswrapper[4702]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 25 10:31:43 crc kubenswrapper[4702]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. 
Nov 25 10:31:43 crc kubenswrapper[4702]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI. Nov 25 10:31:43 crc kubenswrapper[4702]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information. Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.111274 4702 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime" Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117856 4702 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117892 4702 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117930 4702 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117943 4702 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117953 4702 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117962 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117971 4702 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117979 4702 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117987 4702 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.117998 4702 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. 
Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118006 4702 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118013 4702 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118020 4702 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118027 4702 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118033 4702 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118040 4702 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118046 4702 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118053 4702 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118060 4702 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118066 4702 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118073 4702 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118080 4702 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118086 4702 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118093 4702 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118100 4702 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118107 4702 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118114 4702 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118120 4702 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118126 4702 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118133 4702 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118139 4702 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118147 4702 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118154 4702 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118160 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118167 4702 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118173 4702 feature_gate.go:330] unrecognized feature gate: 
UpgradeStatus Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118180 4702 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118187 4702 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118195 4702 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118202 4702 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118209 4702 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118216 4702 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118222 4702 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118229 4702 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118235 4702 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118242 4702 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118248 4702 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118255 4702 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118261 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118268 4702 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118275 4702 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118282 4702 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118293 4702 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118299 4702 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118306 4702 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118313 4702 feature_gate.go:330] unrecognized feature gate: Example Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118320 4702 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118326 4702 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118333 4702 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118339 4702 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118346 4702 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118353 4702 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets 
Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118360 4702 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118367 4702 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118373 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118379 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118385 4702 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118394 4702 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118403 4702 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118413 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.118422 4702 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118567 4702 flags.go:64] FLAG: --address="0.0.0.0" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118586 4702 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118599 4702 flags.go:64] FLAG: --anonymous-auth="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118609 4702 flags.go:64] FLAG: --application-metrics-count-limit="100" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118618 4702 flags.go:64] FLAG: --authentication-token-webhook="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118627 4702 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118638 4702 flags.go:64] FLAG: --authorization-mode="AlwaysAllow" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118646 4702 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118654 4702 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118662 4702 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118671 4702 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118679 4702 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118687 4702 flags.go:64] FLAG: --cgroup-driver="cgroupfs" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118696 4702 flags.go:64] FLAG: --cgroup-root="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118704 4702 flags.go:64] FLAG: --cgroups-per-qos="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118712 4702 flags.go:64] FLAG: --client-ca-file="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118719 4702 flags.go:64] FLAG: --cloud-config="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118727 4702 flags.go:64] FLAG: --cloud-provider="" Nov 25 10:31:43 crc 
kubenswrapper[4702]: I1125 10:31:43.118735 4702 flags.go:64] FLAG: --cluster-dns="[]" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118747 4702 flags.go:64] FLAG: --cluster-domain="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118755 4702 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118763 4702 flags.go:64] FLAG: --config-dir="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118772 4702 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118780 4702 flags.go:64] FLAG: --container-log-max-files="5" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118791 4702 flags.go:64] FLAG: --container-log-max-size="10Mi" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118798 4702 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118807 4702 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118815 4702 flags.go:64] FLAG: --containerd-namespace="k8s.io" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118822 4702 flags.go:64] FLAG: --contention-profiling="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118830 4702 flags.go:64] FLAG: --cpu-cfs-quota="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118838 4702 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118847 4702 flags.go:64] FLAG: --cpu-manager-policy="none" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118855 4702 flags.go:64] FLAG: --cpu-manager-policy-options="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118864 4702 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118872 4702 flags.go:64] FLAG: --enable-controller-attach-detach="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118881 4702 flags.go:64] FLAG: --enable-debugging-handlers="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118889 4702 flags.go:64] FLAG: --enable-load-reader="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118897 4702 flags.go:64] FLAG: --enable-server="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118931 4702 flags.go:64] FLAG: --enforce-node-allocatable="[pods]" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118942 4702 flags.go:64] FLAG: --event-burst="100" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118950 4702 flags.go:64] FLAG: --event-qps="50" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118959 4702 flags.go:64] FLAG: --event-storage-age-limit="default=0" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118967 4702 flags.go:64] FLAG: --event-storage-event-limit="default=0" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118975 4702 flags.go:64] FLAG: --eviction-hard="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118984 4702 flags.go:64] FLAG: --eviction-max-pod-grace-period="0" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.118992 4702 flags.go:64] FLAG: --eviction-minimum-reclaim="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119000 4702 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119009 4702 flags.go:64] FLAG: --eviction-soft="" Nov 25 
10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119017 4702 flags.go:64] FLAG: --eviction-soft-grace-period="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119025 4702 flags.go:64] FLAG: --exit-on-lock-contention="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119032 4702 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119040 4702 flags.go:64] FLAG: --experimental-mounter-path="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119049 4702 flags.go:64] FLAG: --fail-cgroupv1="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119056 4702 flags.go:64] FLAG: --fail-swap-on="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119065 4702 flags.go:64] FLAG: --feature-gates="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119077 4702 flags.go:64] FLAG: --file-check-frequency="20s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119085 4702 flags.go:64] FLAG: --global-housekeeping-interval="1m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119094 4702 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119102 4702 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119110 4702 flags.go:64] FLAG: --healthz-port="10248" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119118 4702 flags.go:64] FLAG: --help="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119125 4702 flags.go:64] FLAG: --hostname-override="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119133 4702 flags.go:64] FLAG: --housekeeping-interval="10s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119143 4702 flags.go:64] FLAG: --http-check-frequency="20s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119151 4702 flags.go:64] FLAG: --image-credential-provider-bin-dir="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119159 4702 flags.go:64] FLAG: --image-credential-provider-config="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119167 4702 flags.go:64] FLAG: --image-gc-high-threshold="85" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119175 4702 flags.go:64] FLAG: --image-gc-low-threshold="80" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119183 4702 flags.go:64] FLAG: --image-service-endpoint="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119190 4702 flags.go:64] FLAG: --kernel-memcg-notification="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119198 4702 flags.go:64] FLAG: --kube-api-burst="100" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119206 4702 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119214 4702 flags.go:64] FLAG: --kube-api-qps="50" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119221 4702 flags.go:64] FLAG: --kube-reserved="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119229 4702 flags.go:64] FLAG: --kube-reserved-cgroup="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119236 4702 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119245 4702 flags.go:64] FLAG: --kubelet-cgroups="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119252 4702 flags.go:64] FLAG: --local-storage-capacity-isolation="true" Nov 25 10:31:43 crc 
kubenswrapper[4702]: I1125 10:31:43.119260 4702 flags.go:64] FLAG: --lock-file="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119268 4702 flags.go:64] FLAG: --log-cadvisor-usage="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119275 4702 flags.go:64] FLAG: --log-flush-frequency="5s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119283 4702 flags.go:64] FLAG: --log-json-info-buffer-size="0" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119294 4702 flags.go:64] FLAG: --log-json-split-stream="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119302 4702 flags.go:64] FLAG: --log-text-info-buffer-size="0" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119310 4702 flags.go:64] FLAG: --log-text-split-stream="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119318 4702 flags.go:64] FLAG: --logging-format="text" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119327 4702 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119336 4702 flags.go:64] FLAG: --make-iptables-util-chains="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119345 4702 flags.go:64] FLAG: --manifest-url="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119352 4702 flags.go:64] FLAG: --manifest-url-header="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119363 4702 flags.go:64] FLAG: --max-housekeeping-interval="15s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119372 4702 flags.go:64] FLAG: --max-open-files="1000000" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119382 4702 flags.go:64] FLAG: --max-pods="110" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119389 4702 flags.go:64] FLAG: --maximum-dead-containers="-1" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119398 4702 flags.go:64] FLAG: --maximum-dead-containers-per-container="1" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119407 4702 flags.go:64] FLAG: --memory-manager-policy="None" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119415 4702 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119423 4702 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119430 4702 flags.go:64] FLAG: --node-ip="192.168.126.11" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119438 4702 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119464 4702 flags.go:64] FLAG: --node-status-max-images="50" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119472 4702 flags.go:64] FLAG: --node-status-update-frequency="10s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119480 4702 flags.go:64] FLAG: --oom-score-adj="-999" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119488 4702 flags.go:64] FLAG: --pod-cidr="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119496 4702 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119507 4702 flags.go:64] FLAG: --pod-manifest-path="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119515 4702 flags.go:64] FLAG: 
--pod-max-pids="-1" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119523 4702 flags.go:64] FLAG: --pods-per-core="0" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119530 4702 flags.go:64] FLAG: --port="10250" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119539 4702 flags.go:64] FLAG: --protect-kernel-defaults="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119546 4702 flags.go:64] FLAG: --provider-id="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119554 4702 flags.go:64] FLAG: --qos-reserved="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119563 4702 flags.go:64] FLAG: --read-only-port="10255" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119577 4702 flags.go:64] FLAG: --register-node="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119586 4702 flags.go:64] FLAG: --register-schedulable="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119593 4702 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119607 4702 flags.go:64] FLAG: --registry-burst="10" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119615 4702 flags.go:64] FLAG: --registry-qps="5" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119623 4702 flags.go:64] FLAG: --reserved-cpus="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119631 4702 flags.go:64] FLAG: --reserved-memory="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119641 4702 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119648 4702 flags.go:64] FLAG: --root-dir="/var/lib/kubelet" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119656 4702 flags.go:64] FLAG: --rotate-certificates="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119663 4702 flags.go:64] FLAG: --rotate-server-certificates="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119671 4702 flags.go:64] FLAG: --runonce="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119679 4702 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119687 4702 flags.go:64] FLAG: --runtime-request-timeout="2m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119696 4702 flags.go:64] FLAG: --seccomp-default="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119705 4702 flags.go:64] FLAG: --serialize-image-pulls="true" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119712 4702 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119720 4702 flags.go:64] FLAG: --storage-driver-db="cadvisor" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119728 4702 flags.go:64] FLAG: --storage-driver-host="localhost:8086" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119736 4702 flags.go:64] FLAG: --storage-driver-password="root" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119744 4702 flags.go:64] FLAG: --storage-driver-secure="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119752 4702 flags.go:64] FLAG: --storage-driver-table="stats" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119760 4702 flags.go:64] FLAG: --storage-driver-user="root" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119768 4702 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s" Nov 25 10:31:43 crc 
kubenswrapper[4702]: I1125 10:31:43.119776 4702 flags.go:64] FLAG: --sync-frequency="1m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119784 4702 flags.go:64] FLAG: --system-cgroups="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119791 4702 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119805 4702 flags.go:64] FLAG: --system-reserved-cgroup="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119812 4702 flags.go:64] FLAG: --tls-cert-file="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119819 4702 flags.go:64] FLAG: --tls-cipher-suites="[]" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119830 4702 flags.go:64] FLAG: --tls-min-version="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119838 4702 flags.go:64] FLAG: --tls-private-key-file="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119846 4702 flags.go:64] FLAG: --topology-manager-policy="none" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119854 4702 flags.go:64] FLAG: --topology-manager-policy-options="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119862 4702 flags.go:64] FLAG: --topology-manager-scope="container" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119870 4702 flags.go:64] FLAG: --v="2" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119880 4702 flags.go:64] FLAG: --version="false" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119891 4702 flags.go:64] FLAG: --vmodule="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119929 4702 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.119938 4702 flags.go:64] FLAG: --volume-stats-agg-period="1m0s" Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120106 4702 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120117 4702 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120125 4702 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120133 4702 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120139 4702 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120148 4702 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120158 4702 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120166 4702 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120174 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120180 4702 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120188 4702 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120196 4702 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120205 4702 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120213 4702 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120220 4702 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120228 4702 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120237 4702 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120291 4702 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120299 4702 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120306 4702 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120314 4702 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120321 4702 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120328 4702 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120335 4702 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120343 4702 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120350 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120358 4702 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120366 4702 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120373 4702 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120382 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120389 4702 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120396 4702 
feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120403 4702 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120411 4702 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120418 4702 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120424 4702 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120431 4702 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120437 4702 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120444 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120461 4702 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120467 4702 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120474 4702 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120480 4702 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120486 4702 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120493 4702 feature_gate.go:330] unrecognized feature gate: Example Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120499 4702 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120507 4702 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120514 4702 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120521 4702 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120527 4702 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120533 4702 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120540 4702 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120547 4702 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120553 4702 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120560 4702 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120567 4702 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120574 4702 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 25 10:31:43 crc 
kubenswrapper[4702]: W1125 10:31:43.120580 4702 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120587 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120593 4702 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120600 4702 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120607 4702 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120615 4702 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120621 4702 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120628 4702 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120635 4702 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120641 4702 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120648 4702 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120654 4702 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120661 4702 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.120667 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.120694 4702 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.135851 4702 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.135949 4702 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136103 4702 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136117 4702 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136128 4702 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136137 4702 feature_gate.go:330] unrecognized feature gate: Example Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136145 4702 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136153 4702 feature_gate.go:330] unrecognized feature gate: GatewayAPI 
Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136160 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136168 4702 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136176 4702 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136183 4702 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136191 4702 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136199 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136252 4702 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136263 4702 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136272 4702 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136281 4702 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136291 4702 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136300 4702 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136309 4702 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136317 4702 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136325 4702 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136332 4702 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136340 4702 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136348 4702 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136356 4702 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136364 4702 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136371 4702 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136381 4702 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136389 4702 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136397 4702 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136406 4702 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 25 10:31:43 crc 
kubenswrapper[4702]: W1125 10:31:43.136414 4702 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136422 4702 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136430 4702 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136438 4702 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136445 4702 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136453 4702 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136461 4702 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136468 4702 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136476 4702 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136484 4702 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136492 4702 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136499 4702 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136507 4702 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136515 4702 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136522 4702 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136530 4702 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136538 4702 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136545 4702 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136553 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136560 4702 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136569 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136577 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136588 4702 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136599 4702 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136612 4702 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. 
It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136622 4702 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136633 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136642 4702 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136651 4702 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136661 4702 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136671 4702 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136692 4702 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136708 4702 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136723 4702 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136737 4702 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136750 4702 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136761 4702 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136771 4702 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136779 4702 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.136787 4702 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.136799 4702 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137082 4702 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137099 4702 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137109 4702 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137119 4702 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137127 4702 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137137 4702 feature_gate.go:330] unrecognized feature gate: NewOLM Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137146 4702 
feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137155 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137163 4702 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137171 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137180 4702 feature_gate.go:330] unrecognized feature gate: Example Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137188 4702 feature_gate.go:330] unrecognized feature gate: DNSNameResolver Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137196 4702 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137205 4702 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137214 4702 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137223 4702 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137231 4702 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137239 4702 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137247 4702 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137255 4702 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137264 4702 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137272 4702 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137280 4702 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137288 4702 feature_gate.go:330] unrecognized feature gate: GatewayAPI Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137296 4702 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137303 4702 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137311 4702 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137319 4702 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137327 4702 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137335 4702 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137343 4702 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137351 4702 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud Nov 25 10:31:43 crc 
kubenswrapper[4702]: W1125 10:31:43.137358 4702 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137366 4702 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137374 4702 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137382 4702 feature_gate.go:330] unrecognized feature gate: InsightsConfig Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137390 4702 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137400 4702 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137411 4702 feature_gate.go:330] unrecognized feature gate: SignatureStores Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137419 4702 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137427 4702 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137436 4702 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137446 4702 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137457 4702 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137466 4702 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137475 4702 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137484 4702 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137493 4702 feature_gate.go:330] unrecognized feature gate: PinnedImages Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137502 4702 feature_gate.go:330] unrecognized feature gate: PlatformOperators Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137510 4702 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137518 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137529 4702 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137539 4702 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137549 4702 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137559 4702 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137573 4702 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release. 
Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137586 4702 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137598 4702 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137608 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137619 4702 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137629 4702 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137638 4702 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137646 4702 feature_gate.go:330] unrecognized feature gate: OnClusterBuild Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137654 4702 feature_gate.go:330] unrecognized feature gate: OVNObservability Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137662 4702 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137670 4702 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137677 4702 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137688 4702 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release. Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137697 4702 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137705 4702 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.137714 4702 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.137726 4702 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.138083 4702 server.go:940] "Client rotation is on, will bootstrap in background" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.144627 4702 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.144785 4702 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem". 
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.146970 4702 server.go:997] "Starting client certificate rotation"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.147019 4702 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.148397 4702 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-17 15:45:32.573173224 +0000 UTC
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.148635 4702 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 1277h13m49.424542607s for next certificate rotation
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.185640 4702 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.191084 4702 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.212303 4702 log.go:25] "Validated CRI v1 runtime API"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.259821 4702 log.go:25] "Validated CRI v1 image API"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.263038 4702 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.275813 4702 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2025-11-25-10-27-12-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.276302 4702 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:49 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:42 fsType:tmpfs blockSize:0}]
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.308539 4702 manager.go:217] Machine: {Timestamp:2025-11-25 10:31:43.30505208 +0000 UTC m=+0.671647819 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654128640 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:163f1bb7-285f-4115-b335-3dabed78c4ea BootID:a04f18ec-6b5a-47c8-b0b5-77f700b576f7 Filesystems:[{Device:/run/user/1000 DeviceMajor:0 DeviceMinor:49 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:42 Capacity:1073741824 Type:vfs Inodes:4108170 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827064320 Type:vfs Inodes:4108170 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730825728 Type:vfs Inodes:819200 HasInodes:true} {Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827064320 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:c7:88:4e Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:c7:88:4e Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:6c:6d:52 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:6f:f9:82 Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:e9:4f:bf Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:81:f1:26 Speed:-1 Mtu:1496} {Name:eth10 MacAddress:06:0d:51:f5:43:79 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:e6:21:c3:d0:8e:97 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654128640 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.308825 4702 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.309134 4702 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.310384 4702 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.310650 4702 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.310695 4702 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.310957 4702 topology_manager.go:138] "Creating topology manager with none policy"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.310971 4702 container_manager_linux.go:303] "Creating device plugin manager"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.311538 4702 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.311577 4702 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.312559 4702 state_mem.go:36] "Initialized new in-memory state store"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.312670 4702 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.315970 4702 kubelet.go:418] "Attempting to sync node with API server"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.315993 4702 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.316022 4702 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.316036 4702 kubelet.go:324] "Adding apiserver pod source"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.316054 4702 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.322776 4702 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.323680 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused
Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.323796 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused
Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.323822 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError"
Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.323867 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.324240 4702 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.327476 4702 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329051 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329083 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329093 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329102 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329116 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329128 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329137 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329151 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329163 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329172 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329187 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.329198 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.331210 4702 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.331784 4702 server.go:1280] "Started kubelet"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.331939 4702 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.332725 4702 server.go:163] "Starting to listen" address="0.0.0.0" port=10250
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.332721 4702 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.334520 4702 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock"
Nov 25 10:31:43 crc systemd[1]: Started Kubernetes Kubelet.
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.335829 4702 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.335926 4702 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.336038 4702 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 04:43:15.403697196 +0000 UTC
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.336135 4702 volume_manager.go:287] "The desired_state_of_world populator starts"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.336169 4702 volume_manager.go:289] "Starting Kubelet Volume Manager"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.336934 4702 desired_state_of_world_populator.go:146] "Desired state populator starts to run"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.337093 4702 server.go:460] "Adding debug handlers to kubelet server"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.336142 4702 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 762h11m32.067562544s for next certificate rotation
Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.337743 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused
Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.337870 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError"
Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.341658 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="200ms"
Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.341594 4702 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.344113 4702 factory.go:55] Registering systemd factory
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.344232 4702 factory.go:221] Registration of the systemd container factory successfully
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.346706 4702 factory.go:153] Registering CRI-O factory
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.347932 4702 factory.go:221] Registration of the crio container factory successfully
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.348431 4702 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.349375 4702 factory.go:103] Registering Raw factory
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.349539 4702 manager.go:1196] Started watching for new ooms in manager
Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.347929 4702 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.46:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187b3951fc2ea40d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-25 10:31:43.331730445 +0000 UTC m=+0.698326134,LastTimestamp:2025-11-25 10:31:43.331730445 +0000 UTC m=+0.698326134,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.355109 4702 manager.go:319] Starting recovery of all containers
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360321 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360375 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360388 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360399 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360412 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360424 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360436 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360528 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360543 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360557 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360566 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360576 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360585 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360598 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360611 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360670 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360679 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360689 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360698 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360711 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360721 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360735 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360747 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360784 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360842 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360856 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360868 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360881 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360890 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360919 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.360928 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361005 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361019 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361029 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361039 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361050 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361064 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361074 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361085 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361251 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361264 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361273 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361329 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361340 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361350 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361361 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361371 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361399 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361409 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361419 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361429 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361470 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361517 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361528 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361538 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361567 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361578 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361587 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361598 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361609 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361619 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361628 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361638 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361669 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361679 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361694 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361785 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361795 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361811 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361820 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361830 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361895 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361918 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361929 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361939 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361947 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.361957 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362019 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362030 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362078 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362093 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362104 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362112 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362127 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362137 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362227 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362239 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362271 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362281 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362292 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362305 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362314 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362323 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.362401 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.368990 4702 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount"
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369056 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369078 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369090 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369100 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369112 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369122 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369132 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369146 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369156 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369167 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369184 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369198 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369210 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369221 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369234 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369246 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369258 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369269 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369279 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369289 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369301 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369629 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369640 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369651 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369663 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369674 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369684 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369694 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369704 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369713 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369723 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369736 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369747 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369756 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369767 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369778 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369788 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369800 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369811 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369821 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369830 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369840 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369852 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369862 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369872 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext=""
Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369882 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state"
pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369908 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369919 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369929 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369939 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369951 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369961 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369973 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369983 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.369992 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370004 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370015 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370025 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370034 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370044 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370054 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370064 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370074 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370084 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370095 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370105 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370121 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370136 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370145 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370188 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370202 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370213 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370227 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370237 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370248 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370260 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370270 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370280 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370289 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370299 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370315 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370324 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370334 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370343 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370352 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370363 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370372 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370382 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370393 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370408 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" 
volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370419 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370430 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370441 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370451 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370463 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370472 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370482 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370491 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370501 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370511 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370521 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" 
volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370531 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370540 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370551 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370567 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370783 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370809 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370823 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370844 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370858 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370882 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370910 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370925 4702 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370937 4702 reconstruct.go:97] "Volume reconstruction finished" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.370951 4702 reconciler.go:26] "Reconciler: start to sync state" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.379382 4702 manager.go:324] Recovery completed Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.391400 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.395303 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.395349 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.395362 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.396565 4702 cpu_manager.go:225] "Starting CPU manager" policy="none" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.396589 4702 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.396682 4702 state_mem.go:36] "Initialized new in-memory state store" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.398117 4702 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.400849 4702 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.400912 4702 status_manager.go:217] "Starting to sync pod status with apiserver" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.400947 4702 kubelet.go:2335] "Starting kubelet main sync loop" Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.400998 4702 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.401565 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.401609 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.421701 4702 policy_none.go:49] "None policy: Start" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.422732 4702 memory_manager.go:170] "Starting memorymanager" policy="None" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.422772 4702 state_mem.go:35] "Initializing new in-memory state store" Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.444660 4702 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.488917 4702 manager.go:334] "Starting Device Plugin manager" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.488977 4702 manager.go:513] "Failed to read data from checkpoint" checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.488994 4702 server.go:79] "Starting device plugin registration server" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.489433 4702 eviction_manager.go:189] "Eviction manager: starting control loop" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.489450 4702 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.489609 4702 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.489701 4702 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.489719 4702 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.500039 4702 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.501133 4702 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc","openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 25 10:31:43 crc kubenswrapper[4702]: 
I1125 10:31:43.501213 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.502241 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.502276 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.502287 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.502419 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.502618 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.502650 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.503560 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.503591 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.503603 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.503756 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.503855 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.503941 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.503955 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.503907 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504027 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504534 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504553 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504562 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504593 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504610 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504619 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504750 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504844 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.504866 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505468 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505490 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505501 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505501 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505584 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505597 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505698 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505783 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.505814 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.506389 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.506418 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.506422 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.506438 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.506429 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.506448 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.506822 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.506844 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.507426 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.507447 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.507456 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.542806 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="400ms" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.574621 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.574708 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.574749 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.574782 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.574950 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575022 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575061 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575093 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575122 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575164 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575253 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575298 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: 
\"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575327 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575353 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.575376 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.590006 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.591447 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.591485 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.591494 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.591526 4702 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.591977 4702 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.46:6443: connect: connection refused" node="crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677043 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677112 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677135 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677183 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" 
(UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677209 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677230 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677253 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677255 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677275 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677297 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677296 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677318 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677257 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 
10:31:43.677345 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677366 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677380 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677387 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677418 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677426 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677433 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677388 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677458 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677488 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677503 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677510 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677520 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677526 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677468 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677565 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.677591 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.792793 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.794078 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.794139 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.794155 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.794192 4702 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.795027 4702 kubelet_node_status.go:99] "Unable to register node with API server" err="Post 
\"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.46:6443: connect: connection refused" node="crc" Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.812593 4702 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.46:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.187b3951fc2ea40d default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-25 10:31:43.331730445 +0000 UTC m=+0.698326134,LastTimestamp:2025-11-25 10:31:43.331730445 +0000 UTC m=+0.698326134,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.840303 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.855302 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.876932 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.894182 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-91edc8ad7fa1611b5f0ab9ea0ac7eb584845cb984e9d759f441c006dc8db9d46 WatchSource:0}: Error finding container 91edc8ad7fa1611b5f0ab9ea0ac7eb584845cb984e9d759f441c006dc8db9d46: Status 404 returned error can't find the container with id 91edc8ad7fa1611b5f0ab9ea0ac7eb584845cb984e9d759f441c006dc8db9d46 Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.894866 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-d86446e8c398ea1e2e269e5e5e4025525c09820c7c78caf67d65c5429d4d65f9 WatchSource:0}: Error finding container d86446e8c398ea1e2e269e5e5e4025525c09820c7c78caf67d65c5429d4d65f9: Status 404 returned error can't find the container with id d86446e8c398ea1e2e269e5e5e4025525c09820c7c78caf67d65c5429d4d65f9 Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.897874 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.899825 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-354e377a9605bb48a1b541f66a58f3139c5a0c8b5d682521dff03a5f21783183 WatchSource:0}: Error finding container 354e377a9605bb48a1b541f66a58f3139c5a0c8b5d682521dff03a5f21783183: Status 404 returned error can't find the container with id 354e377a9605bb48a1b541f66a58f3139c5a0c8b5d682521dff03a5f21783183 Nov 25 10:31:43 crc kubenswrapper[4702]: I1125 10:31:43.906653 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:43 crc kubenswrapper[4702]: W1125 10:31:43.925792 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-0ae63578f964bca22c88e97a130b0368a960f0b4342b3d70f023a9a53e224451 WatchSource:0}: Error finding container 0ae63578f964bca22c88e97a130b0368a960f0b4342b3d70f023a9a53e224451: Status 404 returned error can't find the container with id 0ae63578f964bca22c88e97a130b0368a960f0b4342b3d70f023a9a53e224451 Nov 25 10:31:43 crc kubenswrapper[4702]: E1125 10:31:43.944482 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="800ms" Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.196167 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.198759 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.198811 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.198820 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.198846 4702 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 10:31:44 crc kubenswrapper[4702]: E1125 10:31:44.199969 4702 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.46:6443: connect: connection refused" node="crc" Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.332878 4702 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:44 crc kubenswrapper[4702]: W1125 10:31:44.376160 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:44 crc kubenswrapper[4702]: E1125 10:31:44.376267 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError" Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.405474 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"91edc8ad7fa1611b5f0ab9ea0ac7eb584845cb984e9d759f441c006dc8db9d46"} Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.406428 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"0ae63578f964bca22c88e97a130b0368a960f0b4342b3d70f023a9a53e224451"} Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.408034 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"56e4d99581677045182b0f5fbeb1a6240c3c1e5c75f83299a472c2bcd9758c2b"} Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.409062 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"354e377a9605bb48a1b541f66a58f3139c5a0c8b5d682521dff03a5f21783183"} Nov 25 10:31:44 crc kubenswrapper[4702]: I1125 10:31:44.410058 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d86446e8c398ea1e2e269e5e5e4025525c09820c7c78caf67d65c5429d4d65f9"} Nov 25 10:31:44 crc kubenswrapper[4702]: W1125 10:31:44.534779 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:44 crc kubenswrapper[4702]: E1125 10:31:44.534867 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError" Nov 25 10:31:44 crc kubenswrapper[4702]: W1125 10:31:44.688450 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:44 crc kubenswrapper[4702]: E1125 10:31:44.688547 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError" Nov 25 10:31:44 crc kubenswrapper[4702]: E1125 10:31:44.745440 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="1.6s" Nov 25 10:31:44 crc kubenswrapper[4702]: W1125 10:31:44.845498 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:44 crc kubenswrapper[4702]: E1125 10:31:44.845679 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get 
\"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.000968 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.003159 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.003191 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.003201 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.003225 4702 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 10:31:45 crc kubenswrapper[4702]: E1125 10:31:45.003794 4702 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.46:6443: connect: connection refused" node="crc" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.334016 4702 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.414177 4702 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6" exitCode=0 Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.414339 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.414601 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6"} Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.415080 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.415115 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.415127 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.416054 4702 generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd" exitCode=0 Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.416111 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd"} Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.416165 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller 
attach/detach" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.416876 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.416923 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.416936 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.418613 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9"} Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.418641 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e"} Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.418654 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8"} Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.418666 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3"} Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.418657 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.419865 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.419995 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.420088 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.421023 4702 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade" exitCode=0 Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.421079 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade"} Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.421141 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.423320 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.423360 4702 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.423375 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.424961 4702 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd" exitCode=0 Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.424999 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd"} Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.425074 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.425302 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.425801 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.425822 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.425834 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.425873 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.425890 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:45 crc kubenswrapper[4702]: I1125 10:31:45.425917 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:46 crc kubenswrapper[4702]: W1125 10:31:46.007103 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:46 crc kubenswrapper[4702]: E1125 10:31:46.007644 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.46:6443: connect: connection refused" logger="UnhandledError" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.333200 4702 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:46 crc kubenswrapper[4702]: E1125 10:31:46.347072 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="3.2s" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 
10:31:46.432794 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.432867 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.432809 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.432927 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.434080 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.434114 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.434125 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.436941 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.436988 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.436998 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.437006 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.439572 4702 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868" exitCode=0 Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.439613 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.439756 4702 kubelet_node_status.go:401] "Setting node annotation to enable 
volume controller attach/detach" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.441008 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.441038 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.441048 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.442981 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.442967 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"404f34fc23977c4fbd704b63606be47b6607d02d850eb2cf09280abedf2afc97"} Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.443086 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.444416 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.444463 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.444475 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.444600 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.444639 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.444651 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.604298 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.605946 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.606010 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.606027 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:46 crc kubenswrapper[4702]: I1125 10:31:46.606066 4702 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 10:31:46 crc kubenswrapper[4702]: E1125 10:31:46.606590 4702 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.46:6443: connect: connection refused" node="crc" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.333521 4702 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get 
"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.46:6443: connect: connection refused Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.448594 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"062f4a3c43c74a5eb241ac386d12fcdf7bf2bb4e8d569cbc7ae0e08dd4062bb1"} Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.448667 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.449530 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.449559 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.449572 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.450619 4702 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e" exitCode=0 Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.450656 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e"} Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.450695 4702 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.450720 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.450749 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.450695 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.451683 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.451861 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.451882 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.451724 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.451928 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.451938 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.451719 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:47 crc 
kubenswrapper[4702]: I1125 10:31:47.452009 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.452019 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:47 crc kubenswrapper[4702]: I1125 10:31:47.806692 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:48 crc kubenswrapper[4702]: I1125 10:31:48.456556 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104"} Nov 25 10:31:48 crc kubenswrapper[4702]: I1125 10:31:48.456622 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c"} Nov 25 10:31:48 crc kubenswrapper[4702]: I1125 10:31:48.456639 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:48 crc kubenswrapper[4702]: I1125 10:31:48.456643 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c"} Nov 25 10:31:48 crc kubenswrapper[4702]: I1125 10:31:48.457585 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:48 crc kubenswrapper[4702]: I1125 10:31:48.457622 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:48 crc kubenswrapper[4702]: I1125 10:31:48.457633 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.198168 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.198350 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.199843 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.199957 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.199982 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.467664 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044"} Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.467736 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.467748 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046"} Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.467712 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.468767 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.468802 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.468813 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.469007 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.469033 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.469043 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.763134 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.763417 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.764952 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.764993 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.765005 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.807567 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.809331 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.809386 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.809407 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:49 crc kubenswrapper[4702]: I1125 10:31:49.809449 4702 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.302948 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.472172 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.472172 4702 kubelet_node_status.go:401] "Setting node annotation to 
enable volume controller attach/detach" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.473793 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.473859 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.473806 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.473997 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.474022 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:50 crc kubenswrapper[4702]: I1125 10:31:50.473959 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.060203 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.207188 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.207423 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.209198 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.209238 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.209254 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.260430 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.261849 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.263929 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.264038 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.264111 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.268502 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.475602 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.475701 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 
25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.477242 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.477301 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.477328 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.477374 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.477414 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:51 crc kubenswrapper[4702]: I1125 10:31:51.477436 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:52 crc kubenswrapper[4702]: I1125 10:31:52.199092 4702 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" start-of-body= Nov 25 10:31:52 crc kubenswrapper[4702]: I1125 10:31:52.199213 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": context deadline exceeded (Client.Timeout exceeded while awaiting headers)" Nov 25 10:31:52 crc kubenswrapper[4702]: I1125 10:31:52.888040 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Nov 25 10:31:52 crc kubenswrapper[4702]: I1125 10:31:52.888236 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:52 crc kubenswrapper[4702]: I1125 10:31:52.889391 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:52 crc kubenswrapper[4702]: I1125 10:31:52.889435 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:52 crc kubenswrapper[4702]: I1125 10:31:52.889444 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:53 crc kubenswrapper[4702]: E1125 10:31:53.501005 4702 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.390065 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.390357 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.392675 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.392742 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.392753 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.526204 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.526421 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.527680 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.527712 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.527724 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:54 crc kubenswrapper[4702]: I1125 10:31:54.533268 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:31:55 crc kubenswrapper[4702]: I1125 10:31:55.487170 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:55 crc kubenswrapper[4702]: I1125 10:31:55.488376 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:55 crc kubenswrapper[4702]: I1125 10:31:55.488439 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:55 crc kubenswrapper[4702]: I1125 10:31:55.488456 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:57 crc kubenswrapper[4702]: W1125 10:31:57.485153 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout Nov 25 10:31:57 crc kubenswrapper[4702]: I1125 10:31:57.485257 4702 trace.go:236] Trace[136118229]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Nov-2025 10:31:47.483) (total time: 10001ms): Nov 25 10:31:57 crc kubenswrapper[4702]: Trace[136118229]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (10:31:57.485) Nov 25 10:31:57 crc kubenswrapper[4702]: Trace[136118229]: [10.001751004s] [10.001751004s] END Nov 25 10:31:57 crc kubenswrapper[4702]: E1125 10:31:57.485287 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Nov 25 10:31:57 crc kubenswrapper[4702]: W1125 10:31:57.592530 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout Nov 25 10:31:57 crc 
kubenswrapper[4702]: I1125 10:31:57.592637 4702 trace.go:236] Trace[214566005]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Nov-2025 10:31:47.591) (total time: 10001ms): Nov 25 10:31:57 crc kubenswrapper[4702]: Trace[214566005]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (10:31:57.592) Nov 25 10:31:57 crc kubenswrapper[4702]: Trace[214566005]: [10.001245629s] [10.001245629s] END Nov 25 10:31:57 crc kubenswrapper[4702]: E1125 10:31:57.592669 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Nov 25 10:31:57 crc kubenswrapper[4702]: W1125 10:31:57.595103 4702 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout Nov 25 10:31:57 crc kubenswrapper[4702]: I1125 10:31:57.595191 4702 trace.go:236] Trace[77988716]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Nov-2025 10:31:47.593) (total time: 10001ms): Nov 25 10:31:57 crc kubenswrapper[4702]: Trace[77988716]: ---"Objects listed" error:Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": net/http: TLS handshake timeout 10001ms (10:31:57.595) Nov 25 10:31:57 crc kubenswrapper[4702]: Trace[77988716]: [10.001236038s] [10.001236038s] END Nov 25 10:31:57 crc kubenswrapper[4702]: E1125 10:31:57.595215 4702 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": net/http: TLS handshake timeout" logger="UnhandledError" Nov 25 10:31:57 crc kubenswrapper[4702]: I1125 10:31:57.643717 4702 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Liveness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43586->192.168.126.11:17697: read: connection reset by peer" start-of-body= Nov 25 10:31:57 crc kubenswrapper[4702]: I1125 10:31:57.643809 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": read tcp 192.168.126.11:43586->192.168.126.11:17697: read: connection reset by peer" Nov 25 10:31:57 crc kubenswrapper[4702]: I1125 10:31:57.807278 4702 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Nov 25 10:31:57 crc kubenswrapper[4702]: I1125 10:31:57.807362 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.066673 4702 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.066750 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.072139 4702 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.072217 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.496574 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.499577 4702 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="062f4a3c43c74a5eb241ac386d12fcdf7bf2bb4e8d569cbc7ae0e08dd4062bb1" exitCode=255 Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.499631 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"062f4a3c43c74a5eb241ac386d12fcdf7bf2bb4e8d569cbc7ae0e08dd4062bb1"} Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.499809 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.500759 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.500810 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.500827 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:31:58 crc kubenswrapper[4702]: I1125 10:31:58.501509 4702 scope.go:117] "RemoveContainer" containerID="062f4a3c43c74a5eb241ac386d12fcdf7bf2bb4e8d569cbc7ae0e08dd4062bb1" Nov 25 10:31:59 crc kubenswrapper[4702]: I1125 10:31:59.504321 4702 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 25 10:31:59 crc kubenswrapper[4702]: I1125 10:31:59.506386 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86"} Nov 25 10:31:59 crc kubenswrapper[4702]: I1125 10:31:59.506530 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:31:59 crc kubenswrapper[4702]: I1125 10:31:59.507624 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:31:59 crc kubenswrapper[4702]: I1125 10:31:59.507655 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:31:59 crc kubenswrapper[4702]: I1125 10:31:59.507667 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:00 crc kubenswrapper[4702]: I1125 10:32:00.315037 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:32:00 crc kubenswrapper[4702]: I1125 10:32:00.508737 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:32:00 crc kubenswrapper[4702]: I1125 10:32:00.509010 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:32:00 crc kubenswrapper[4702]: I1125 10:32:00.510056 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:00 crc kubenswrapper[4702]: I1125 10:32:00.510110 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:00 crc kubenswrapper[4702]: I1125 10:32:00.510123 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:00 crc kubenswrapper[4702]: I1125 10:32:00.516326 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:32:01 crc kubenswrapper[4702]: I1125 10:32:01.185698 4702 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Nov 25 10:32:01 crc kubenswrapper[4702]: I1125 10:32:01.511122 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:32:01 crc kubenswrapper[4702]: I1125 10:32:01.512270 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:01 crc kubenswrapper[4702]: I1125 10:32:01.512328 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:01 crc kubenswrapper[4702]: I1125 10:32:01.512343 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:02 crc kubenswrapper[4702]: I1125 10:32:02.198812 4702 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while 
Nov 25 10:32:02 crc kubenswrapper[4702]: I1125 10:32:02.198812 4702 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Nov 25 10:32:02 crc kubenswrapper[4702]: I1125 10:32:02.199186 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Nov 25 10:32:02 crc kubenswrapper[4702]: I1125 10:32:02.513473 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Nov 25 10:32:02 crc kubenswrapper[4702]: I1125 10:32:02.514314 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:02 crc kubenswrapper[4702]: I1125 10:32:02.514376 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:02 crc kubenswrapper[4702]: I1125 10:32:02.514388 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:02 crc kubenswrapper[4702]: I1125 10:32:02.960641 4702 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160
Nov 25 10:32:03 crc kubenswrapper[4702]: E1125 10:32:03.059021 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s"
Nov 25 10:32:03 crc kubenswrapper[4702]: I1125 10:32:03.061372 4702 trace.go:236] Trace[1688992381]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (25-Nov-2025 10:31:49.808) (total time: 13252ms):
Nov 25 10:32:03 crc kubenswrapper[4702]: Trace[1688992381]: ---"Objects listed" error: 13252ms (10:32:03.061)
Nov 25 10:32:03 crc kubenswrapper[4702]: Trace[1688992381]: [13.252855685s] [13.252855685s] END
Nov 25 10:32:03 crc kubenswrapper[4702]: I1125 10:32:03.061423 4702 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Nov 25 10:32:03 crc kubenswrapper[4702]: I1125 10:32:03.062641 4702 reconstruct.go:205] "DevicePaths of reconstructed volumes updated"
Nov 25 10:32:03 crc kubenswrapper[4702]: E1125 10:32:03.063450 4702 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc"
Nov 25 10:32:03 crc kubenswrapper[4702]: E1125 10:32:03.501659 4702 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found"
Nov 25 10:32:03 crc kubenswrapper[4702]: I1125 10:32:03.702696 4702 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.329356 4702 apiserver.go:52] "Watching apiserver"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.335575 4702 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
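"Failed to ensure lease exists, will retry" above means the kubelet could not fetch or create the kube-node-lease/crc Lease within its 10s request timeout, so node heartbeats stall; the "Unable to register node" and eviction-manager errors follow from the same API-server unavailability. The sketch below touches the same API surface with client-go (it is not the kubelet's internal lease controller, and the kubeconfig path is an assumption):

    package main

    import (
    	"context"
    	"fmt"
    	"time"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	cfg, err := clientcmd.BuildConfigFromFlags("", "/var/lib/kubelet/kubeconfig") // assumed path
    	if err != nil {
    		panic(err)
    	}
    	client := kubernetes.NewForConfigOrDie(cfg)

    	ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
    	defer cancel()

    	// The lease the log is trying to ensure: kube-node-lease/crc.
    	lease, err := client.CoordinationV1().Leases("kube-node-lease").Get(ctx, "crc", metav1.GetOptions{})
    	if err != nil {
    		fmt.Println("get lease failed; the kubelet retries (interval=6.4s above):", err)
    		return
    	}
    	now := metav1.NewMicroTime(time.Now())
    	lease.Spec.RenewTime = &now // a heartbeat is just a renewed timestamp
    	if _, err := client.CoordinationV1().Leases("kube-node-lease").Update(ctx, lease, metav1.UpdateOptions{}); err != nil {
    		fmt.Println("renew failed:", err)
    	}
    }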
pods=["openshift-ovn-kubernetes/ovnkube-node-h8hn4","openshift-multus/multus-dxlxj","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-target-xd92c","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-pjw7q","openshift-machine-config-operator/machine-config-daemon-g5m5h","openshift-multus/multus-additional-cni-plugins-v5gd5","openshift-network-diagnostics/network-check-source-55646444c4-trplf"] Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.336237 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.336321 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.336407 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.336416 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.336455 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.336472 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.336535 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.336828 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.336921 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.337025 4702 util.go:30] "No sandbox for pod can be found. 
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.337025 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.337145 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-dxlxj"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.337472 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-pjw7q"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.337768 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.337880 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-v5gd5"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.341744 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.343234 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.343406 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.343410 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.343469 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.343895 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.344052 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.344755 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.345170 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.345326 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.347831 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.348094 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.348237 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.348371 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.348533 4702 reflector.go:368] Caches populated for *v1.ConfigMap from
object-"openshift-network-operator"/"kube-root-ca.crt" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.348539 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.348636 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.348855 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.349002 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.349167 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.349404 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.349405 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.349624 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.349674 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.349750 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.350607 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.352938 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.353062 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.353369 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.353490 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.353541 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.365303 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368238 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368295 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368333 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-etc-kubernetes\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368363 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: 
\"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-netd\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368386 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-env-overrides\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368412 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368437 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-cnibin\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368464 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-netns\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368483 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-systemd\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368504 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-config\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368584 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368606 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-script-lib\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368631 4702 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-os-release\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368649 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fc7bcda9-5809-4852-8dd7-414ead106d61-cni-binary-copy\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368668 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-socket-dir-parent\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368691 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-multus-certs\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368715 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368739 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-bin\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368780 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368804 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-kubelet\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368840 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-var-lib-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: 
I1125 10:32:04.368863 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-k8s-cni-cncf-io\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368886 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-cni-multus\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368943 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.368969 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovn-node-metrics-cert\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369015 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8dqmn\" (UniqueName: \"kubernetes.io/projected/fc7bcda9-5809-4852-8dd7-414ead106d61-kube-api-access-8dqmn\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369050 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-cni-bin\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369079 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369106 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369141 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-cni-dir\") pod 
\"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369168 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-ovn-kubernetes\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369190 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8mlzf\" (UniqueName: \"kubernetes.io/projected/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-kube-api-access-8mlzf\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369212 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-system-cni-dir\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369232 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369255 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369276 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369299 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-node-log\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369318 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c862bae9-1615-46ec-a28d-889c38e69e81-hosts-file\") pod \"node-resolver-pjw7q\" (UID: \"c862bae9-1615-46ec-a28d-889c38e69e81\") " pod="openshift-dns/node-resolver-pjw7q" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369337 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" 
(UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-etc-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369360 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369382 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-daemon-config\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369402 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-kubelet\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369425 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369446 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-systemd-units\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369466 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-slash\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369486 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369504 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-log-socket\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 
10:32:04.369524 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tsgrm\" (UniqueName: \"kubernetes.io/projected/c862bae9-1615-46ec-a28d-889c38e69e81-kube-api-access-tsgrm\") pod \"node-resolver-pjw7q\" (UID: \"c862bae9-1615-46ec-a28d-889c38e69e81\") " pod="openshift-dns/node-resolver-pjw7q"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369543 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-netns\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369563 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369582 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-ovn\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369622 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-hostroot\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369641 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-conf-dir\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.369795 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.370038 4702 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.370433 4702 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
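The "not registered" errors above mean the kubelet's configmap and secret caches do not yet contain the objects these volumes reference, so the mounts cannot be satisfied; the entries that follow show the volume manager refusing an immediate retry and rescheduling each failed MountVolume.SetUp after durationBeforeRetry 500ms, a delay that grows on repeated failures. The same retry pattern is available in apimachinery's wait helpers; this is a sketch of the pattern, not the kubelet's actual code path, and mountSecretVolume is a hypothetical stand-in for the failing SetUp.

    package main

    import (
    	"errors"
    	"fmt"
    	"time"

    	"k8s.io/apimachinery/pkg/util/wait"
    )

    // mountSecretVolume is a hypothetical stand-in for MountVolume.SetUp,
    // which fails here until the secret appears in the kubelet's cache.
    func mountSecretVolume() error {
    	return errors.New(`object "openshift-network-console"/"networking-console-plugin-cert" not registered`)
    }

    func main() {
    	// 500ms initial delay, doubling per attempt, as in durationBeforeRetry above.
    	backoff := wait.Backoff{Duration: 500 * time.Millisecond, Factor: 2.0, Steps: 6}
    	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
    		if err := mountSecretVolume(); err != nil {
    			fmt.Println("retrying after failure:", err)
    			return false, nil // not done; wait and try again
    		}
    		return true, nil
    	})
    	if err != nil {
    		fmt.Println("gave up:", err)
    	}
    }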
Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.370513 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:04.87049155 +0000 UTC m=+22.237087239 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.370729 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:04.870718716 +0000 UTC m=+22.237314415 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.371237 4702 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.371649 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.374333 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.378708 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.381212 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.381454 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.385295 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.385326 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.385340 4702 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.385389 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:04.885374996 +0000 UTC m=+22.251970685 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.388756 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.388843 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.388862 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.388873 4702 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.388939 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:04.888922682 +0000 UTC m=+22.255518371 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.389289 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.390717 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.394619 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.404609 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.415411 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.416118 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.428692 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.431343 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.434282 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"]
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.437816 4702 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.439424 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.449004 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.457788 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470094 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470131 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470149 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470169 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470220 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470243 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470549 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470586 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470651 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470699 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470730 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470772 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470789 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.470858 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471115 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471144 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471153 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471194 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471219 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471219 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471245 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471271 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471293 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471318 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471345 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471337 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471368 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471405 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471392 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471486 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471510 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471531 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471552 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471574 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471595 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471616 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471638 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471660 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471675 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471691 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471706 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471725 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471741 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471757 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471772 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471789 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471805 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471819 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471835 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471850 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471866 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471880 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471912 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471930 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471947 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471963 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471980 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471996 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472036 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472054 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472073 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472140 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472159 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472175 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472191 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472208 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472228 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472243 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472259 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472275 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472291 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472306 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472323 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472342 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472357 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472373 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472389 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472405 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472421 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472476 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472493 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472511 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472528 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472546 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472564 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472582 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472597 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472616 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472632 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472646 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472662 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472677 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472691 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472708 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472724 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472775 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472794 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472811 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472825 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472840 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472855 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472870 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472887 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472917 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472933 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472951 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472966 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472982 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472996 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473010 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473026 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473042 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473057 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473074 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473090 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473107 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473124 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473139 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473155 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473170 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473186 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473203 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473218 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473240 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473259 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473275 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473291 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473307 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473324 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473341 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473356 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473372 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473387 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473402 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473418 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473434 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473451 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473468 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473484 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473501 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473516 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473531 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473548 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473565 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473579 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473696 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") "
Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473714 4702 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473752 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473769 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473892 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473926 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473943 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473966 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473983 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474000 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474017 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: 
I1125 10:32:04.474033 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474050 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474064 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474080 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474096 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471612 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474118 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471880 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471916 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.471961 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472118 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472257 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472270 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472407 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472470 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472563 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472661 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472702 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472716 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472849 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.472857 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473007 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473045 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473227 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473375 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473404 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473539 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473680 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.473939 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474101 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474372 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474395 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474411 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474429 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474445 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474461 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.474725 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.475008 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.475185 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.475546 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.475757 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.475808 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.475961 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.476078 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.476150 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.476104 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.476488 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.476552 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.476639 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.476646 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.476920 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:32:04.97688859 +0000 UTC m=+22.343484269 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.477047 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.477837 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.477847 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.477934 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.477970 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.477995 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478069 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478073 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478121 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478141 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478161 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478178 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478198 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478249 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478269 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478287 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478304 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod 
\"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478309 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478332 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478352 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478371 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478389 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478406 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478423 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478439 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478457 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478473 
4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478489 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478507 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478524 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.480289 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.480370 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.480467 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.480696 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.482556 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.482750 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.482928 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.483112 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.483194 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.483273 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.483495 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.483818 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.483958 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.484923 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.485145 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.485411 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.478553 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.485516 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.485613 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.485610 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.485952 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.486019 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.486045 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.486300 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.486522 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.486763 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.486812 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.487157 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.487318 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.487343 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.487545 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.487829 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.487892 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.487981 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.488054 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.488779 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.488981 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.489458 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.489628 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.479396 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490299 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490456 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490284 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490677 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490736 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490716 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490793 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490831 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490867 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490888 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490945 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490975 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.490997 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491024 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491098 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491298 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-system-cni-dir\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491366 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-node-log\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491394 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c862bae9-1615-46ec-a28d-889c38e69e81-hosts-file\") pod \"node-resolver-pjw7q\" (UID: \"c862bae9-1615-46ec-a28d-889c38e69e81\") " pod="openshift-dns/node-resolver-pjw7q" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491455 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-etc-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491535 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491588 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491587 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491634 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491676 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-node-log\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491702 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-daemon-config\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491631 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491831 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491829 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-kubelet\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491868 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-kubelet\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491930 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-systemd-units\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491967 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-slash\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491977 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491990 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491997 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.492015 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-log-socket\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.492072 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-proxy-tls\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.492665 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/c862bae9-1615-46ec-a28d-889c38e69e81-hosts-file\") pod \"node-resolver-pjw7q\" (UID: \"c862bae9-1615-46ec-a28d-889c38e69e81\") " pod="openshift-dns/node-resolver-pjw7q" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.492724 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/89de2be3-64db-4383-951f-0758f58ffccb-cni-binary-copy\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.492344 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.493429 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.493768 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.493809 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.496017 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.496256 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.496332 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.496392 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.496441 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.496725 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.497758 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.496955 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.497257 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.497364 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.497408 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.497428 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.497641 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498020 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498042 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.496321 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tsgrm\" (UniqueName: \"kubernetes.io/projected/c862bae9-1615-46ec-a28d-889c38e69e81-kube-api-access-tsgrm\") pod \"node-resolver-pjw7q\" (UID: \"c862bae9-1615-46ec-a28d-889c38e69e81\") " pod="openshift-dns/node-resolver-pjw7q" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498111 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498112 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b5rwk\" (UniqueName: \"kubernetes.io/projected/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-kube-api-access-b5rwk\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498212 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-netns\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498267 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-ovn\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498393 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-hostroot\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498432 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-conf-dir\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498459 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498487 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498509 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/89de2be3-64db-4383-951f-0758f58ffccb-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498682 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498719 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498722 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.498974 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499078 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499084 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499092 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-systemd-units\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499236 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499261 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499470 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-slash\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499536 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499547 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-ovn\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499565 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-hostroot\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499586 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-conf-dir\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499614 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499687 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-daemon-config\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499689 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "kube-api-access-dbsvg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499559 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.491973 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-etc-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499294 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-netns\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.499803 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-etc-kubernetes\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500078 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500150 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-os-release\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500085 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500202 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500217 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500006 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-log-socket\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500381 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-netd\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500508 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-env-overrides\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500610 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-cnibin\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500688 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-netns\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500761 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-systemd\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500835 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-config\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500949 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-script-lib\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 
10:32:04.501060 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-cnibin\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501129 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-os-release\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501196 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fc7bcda9-5809-4852-8dd7-414ead106d61-cni-binary-copy\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501261 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-socket-dir-parent\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501344 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-multus-certs\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501429 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-bin\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501498 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-rootfs\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501572 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-mcd-auth-proxy-config\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501653 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-kubelet\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501725 4702 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-var-lib-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501791 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-k8s-cni-cncf-io\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501864 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-cni-multus\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501955 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovn-node-metrics-cert\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502037 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8dqmn\" (UniqueName: \"kubernetes.io/projected/fc7bcda9-5809-4852-8dd7-414ead106d61-kube-api-access-8dqmn\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502111 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-cni-bin\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502211 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-cni-dir\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502308 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-ovn-kubernetes\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502387 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8mlzf\" (UniqueName: \"kubernetes.io/projected/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-kube-api-access-8mlzf\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502463 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-system-cni-dir\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502535 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wr6xh\" (UniqueName: \"kubernetes.io/projected/89de2be3-64db-4383-951f-0758f58ffccb-kube-api-access-wr6xh\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502609 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502761 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502825 4702 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502880 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502961 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503021 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503076 4702 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503134 4702 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503192 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503260 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath 
\"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503310 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-systemd\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503286 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-netns\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503317 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503359 4702 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503373 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503387 4702 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503401 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503413 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503426 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503438 4702 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503449 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503462 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503474 4702 reconciler_common.go:293] 
"Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503486 4702 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503498 4702 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503510 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503522 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503538 4702 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503552 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503565 4702 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503577 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503589 4702 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503601 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503614 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503627 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503640 4702 
reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503651 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503663 4702 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503675 4702 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503688 4702 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503699 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503711 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503723 4702 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503736 4702 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503749 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503761 4702 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503773 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503784 4702 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503797 4702 reconciler_common.go:293] "Volume detached for volume 
\"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503809 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503821 4702 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503832 4702 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503844 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503856 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503868 4702 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503881 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503893 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503921 4702 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503932 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503944 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503956 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503970 4702 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503981 4702 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503993 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504007 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504019 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504032 4702 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504044 4702 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504056 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504068 4702 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504080 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504092 4702 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504105 4702 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504142 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504158 4702 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504171 4702 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504183 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504194 4702 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504207 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504219 4702 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504232 4702 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504244 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504258 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504270 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504282 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504296 4702 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504308 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 25 
10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504321 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504333 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504346 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504356 4702 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504367 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504380 4702 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504391 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504426 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504439 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504450 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504463 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504474 4702 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504486 4702 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: 
I1125 10:32:04.504497 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504510 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504522 4702 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504536 4702 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504546 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504557 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504568 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504580 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504591 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504603 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504614 4702 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504625 4702 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504637 4702 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504649 4702 
reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504661 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504674 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504689 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504702 4702 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504715 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504728 4702 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504740 4702 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504751 4702 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504763 4702 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504775 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504787 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504799 4702 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504811 4702 
reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504822 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504834 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504847 4702 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504858 4702 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504870 4702 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504882 4702 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504893 4702 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504930 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504944 4702 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504957 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504970 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503260 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-cnibin\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.505521 4702 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-config\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.506166 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-script-lib\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.506188 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-bin\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.506221 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-ovn-kubernetes\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.506259 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-cni-bin\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500025 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-etc-kubernetes\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.506626 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-socket-dir-parent\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.506641 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/fc7bcda9-5809-4852-8dd7-414ead106d61-cni-binary-copy\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.506665 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-multus-certs\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.507153 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-kubelet\") pod 
\"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.507197 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-var-lib-openvswitch\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.507238 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-run-k8s-cni-cncf-io\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.507276 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-host-var-lib-cni-multus\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502847 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-env-overrides\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.507434 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500388 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500548 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500616 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500626 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500638 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500722 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500827 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501118 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501247 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501366 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501501 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501564 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501800 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501843 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501890 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501919 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501984 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502074 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502079 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502460 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.502657 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503327 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.503992 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504006 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.504214 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.505322 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). 
InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.506115 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.507792 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-multus-cni-dir\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.508003 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-os-release\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500441 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-netd\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.508154 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.508200 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.508377 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.508534 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/fc7bcda9-5809-4852-8dd7-414ead106d61-system-cni-dir\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.510366 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.511864 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500236 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.500804 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.512710 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovn-node-metrics-cert\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501532 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.516793 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501137 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.501062 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.517158 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.520071 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.520125 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.520772 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.520775 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.520837 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.521681 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.521707 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). 
InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.522420 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.523383 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.523688 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.523753 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.524005 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.524194 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.524680 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.524787 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.526365 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.528345 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.528613 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8mlzf\" (UniqueName: \"kubernetes.io/projected/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-kube-api-access-8mlzf\") pod \"ovnkube-node-h8hn4\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.528863 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8dqmn\" (UniqueName: \"kubernetes.io/projected/fc7bcda9-5809-4852-8dd7-414ead106d61-kube-api-access-8dqmn\") pod \"multus-dxlxj\" (UID: \"fc7bcda9-5809-4852-8dd7-414ead106d61\") " pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.529327 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.529466 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tsgrm\" (UniqueName: \"kubernetes.io/projected/c862bae9-1615-46ec-a28d-889c38e69e81-kube-api-access-tsgrm\") pod \"node-resolver-pjw7q\" (UID: \"c862bae9-1615-46ec-a28d-889c38e69e81\") " pod="openshift-dns/node-resolver-pjw7q" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.542016 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.547283 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.547459 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.557143 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.558036 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.565778 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.566850 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.575867 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.585608 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.593968 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.602658 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605589 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wr6xh\" (UniqueName: \"kubernetes.io/projected/89de2be3-64db-4383-951f-0758f58ffccb-kube-api-access-wr6xh\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605627 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-system-cni-dir\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605654 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-proxy-tls\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605673 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/89de2be3-64db-4383-951f-0758f58ffccb-cni-binary-copy\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605688 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b5rwk\" (UniqueName: \"kubernetes.io/projected/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-kube-api-access-b5rwk\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605707 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/89de2be3-64db-4383-951f-0758f58ffccb-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605722 4702 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-os-release\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605736 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605759 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-cnibin\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605775 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-rootfs\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.605790 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-mcd-auth-proxy-config\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606030 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606043 4702 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606054 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606062 4702 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606070 4702 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606078 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 
10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606086 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606094 4702 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606102 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606110 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606121 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606131 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606141 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606150 4702 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606159 4702 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606167 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606176 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606185 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606195 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: 
\"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606204 4702 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606214 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606223 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606233 4702 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606244 4702 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606253 4702 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606264 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606272 4702 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606280 4702 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606289 4702 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606284 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-system-cni-dir\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606297 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" 
DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606343 4702 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606356 4702 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606368 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606379 4702 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606389 4702 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606400 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606411 4702 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606422 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606438 4702 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606449 4702 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606460 4702 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606470 4702 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606480 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: 
I1125 10:32:04.606491 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606501 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606512 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606534 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606546 4702 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606563 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606574 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606585 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606596 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606607 4702 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606618 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606629 4702 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606639 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606649 4702 reconciler_common.go:293] "Volume detached for volume 
\"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606660 4702 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606672 4702 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.606870 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-mcd-auth-proxy-config\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.607135 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-cnibin\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.607206 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-os-release\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.607169 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-rootfs\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.607546 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/89de2be3-64db-4383-951f-0758f58ffccb-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.608084 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/89de2be3-64db-4383-951f-0758f58ffccb-tuning-conf-dir\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.608178 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/89de2be3-64db-4383-951f-0758f58ffccb-cni-binary-copy\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 
10:32:04.610373 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-proxy-tls\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.621085 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.623217 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b5rwk\" (UniqueName: \"kubernetes.io/projected/5b72fbd8-190c-44a0-bdf1-ed4523f82cc2-kube-api-access-b5rwk\") pod \"machine-config-daemon-g5m5h\" (UID: \"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\") " pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.625252 4702 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wr6xh\" (UniqueName: \"kubernetes.io/projected/89de2be3-64db-4383-951f-0758f58ffccb-kube-api-access-wr6xh\") pod \"multus-additional-cni-plugins-v5gd5\" (UID: \"89de2be3-64db-4383-951f-0758f58ffccb\") " pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.629409 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.637616 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.647028 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.655168 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.655422 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.665119 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Nov 25 10:32:04 crc kubenswrapper[4702]: W1125 10:32:04.671696 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-0bcdb340a826dc16f1012523b5251787e8a9bc9423500b3eed93c9a01611f929 WatchSource:0}: Error finding container 0bcdb340a826dc16f1012523b5251787e8a9bc9423500b3eed93c9a01611f929: Status 404 returned error can't find the container with id 0bcdb340a826dc16f1012523b5251787e8a9bc9423500b3eed93c9a01611f929 Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.676264 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o
://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2
885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.684825 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.687172 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.700069 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:04 crc kubenswrapper[4702]: W1125 10:32:04.708681 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-233278bad92d477cfc203051b086c63506c43931fd3d8f02b496f6343da3a277 WatchSource:0}: Error finding container 233278bad92d477cfc203051b086c63506c43931fd3d8f02b496f6343da3a277: Status 404 returned error can't find the container with id 233278bad92d477cfc203051b086c63506c43931fd3d8f02b496f6343da3a277 Nov 25 
10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.712934 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-dxlxj" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.722343 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.733108 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-pjw7q" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.742544 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.750245 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.909426 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.909489 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.909519 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:04 crc kubenswrapper[4702]: I1125 10:32:04.909542 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909616 4702 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909641 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909690 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909703 4702 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl 
for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909773 4702 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909783 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909936 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909951 4702 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.909674 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:05.90966042 +0000 UTC m=+23.276256109 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.910007 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:05.909991059 +0000 UTC m=+23.276586748 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.910023 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:05.91001544 +0000 UTC m=+23.276611129 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:04 crc kubenswrapper[4702]: E1125 10:32:04.910036 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:05.910031361 +0000 UTC m=+23.276627040 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.010727 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.010943 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:32:06.010912506 +0000 UTC m=+23.377508195 (durationBeforeRetry 1s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.407807 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.409068 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.410790 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.411672 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.413067 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.413806 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.414650 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.415241 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.416077 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.416673 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.417224 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.417869 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.418388 4702 kubelet_volumes.go:163] "Cleaned up 
orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.418999 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.419540 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.420143 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.420794 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.421287 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.422017 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.422631 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.423159 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.423734 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.425199 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.426238 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.426940 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.427872 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.430093 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.430597 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.431227 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.431760 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.432252 4702 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.432352 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.433754 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.434374 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.434774 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.436076 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.436857 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.437484 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.439172 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.440323 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.440986 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.441641 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.442432 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.443182 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.443755 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.444434 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.445178 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.446116 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.446754 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.447327 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.447958 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.448624 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.450164 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.450749 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.523563 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" 
event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.523793 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.523805 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"8988d17312eedf54b6df261456b79dbd7a764a1bca20b1ef160db0dd63c9ccfd"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.525584 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.525612 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.525621 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"1f817a92c02ebb44d83a143aafda326923fa0d3aa7fd42ee6c9e1d355b8aaa09"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.527391 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65" exitCode=0 Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.527433 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.527466 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"b0d0d80327a02d53b7b6262ecc0ee5b33b184904471ffe565634dba2bdecd0d4"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.529637 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-pjw7q" event={"ID":"c862bae9-1615-46ec-a28d-889c38e69e81","Type":"ContainerStarted","Data":"151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.529693 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-pjw7q" event={"ID":"c862bae9-1615-46ec-a28d-889c38e69e81","Type":"ContainerStarted","Data":"2330fe61acbc28afaa3351471b67992c1f2f844118b3f6e299a82ab478f42f64"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.531254 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.531323 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"0bcdb340a826dc16f1012523b5251787e8a9bc9423500b3eed93c9a01611f929"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.532557 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerStarted","Data":"8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.532609 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerStarted","Data":"93d3c5e320fd030157401c07d82008e24ec3293d1fa7e88bc971b2abe3bd5fa6"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.534416 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.534805 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.536217 4702 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86" exitCode=255 Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.536270 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.536317 4702 scope.go:117] "RemoveContainer" containerID="062f4a3c43c74a5eb241ac386d12fcdf7bf2bb4e8d569cbc7ae0e08dd4062bb1" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.539842 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dxlxj" event={"ID":"fc7bcda9-5809-4852-8dd7-414ead106d61","Type":"ContainerStarted","Data":"9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.539887 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dxlxj" event={"ID":"fc7bcda9-5809-4852-8dd7-414ead106d61","Type":"ContainerStarted","Data":"03c329c3c0fc8c885d2d0a729b5e1b44ef5bd9b7dd46ca9fb43f6dced4676ce4"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.541341 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"233278bad92d477cfc203051b086c63506c43931fd3d8f02b496f6343da3a277"} Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.545989 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.550958 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.551466 4702 scope.go:117] "RemoveContainer" containerID="499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86" Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.551663 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.556511 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.569303 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\
\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.579872 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.591366 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.605230 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.617794 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.626815 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.637128 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.654766 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.663759 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.673939 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.685119 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.711744 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.726541 
4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.748482 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.761189 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.775514 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.785182 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.805606 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runn
ing\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38
f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.816443 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.827681 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.840290 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-mu
ltus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.852825 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://062f4a3c43c74a5eb241ac386d12fcdf7bf2bb4e8d569cbc7ae0e08dd4062bb1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:31:57Z\\\",\\\"message\\\":\\\"W1125 10:31:46.816923 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1125 
10:31:46.817354 1 crypto.go:601] Generating new CA for check-endpoints-signer@1764066706 cert, and key in /tmp/serving-cert-1911765727/serving-signer.crt, /tmp/serving-cert-1911765727/serving-signer.key\\\\nI1125 10:31:47.121636 1 observer_polling.go:159] Starting file observer\\\\nW1125 10:31:47.124747 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1125 10:31:47.124937 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1125 10:31:47.125544 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1911765727/tls.crt::/tmp/serving-cert-1911765727/tls.key\\\\\\\"\\\\nF1125 10:31:57.638797 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.864080 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.918313 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.918363 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.918388 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:05 crc kubenswrapper[4702]: I1125 10:32:05.918410 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918517 4702 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918553 4702 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918524 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918605 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:07.918585177 +0000 UTC m=+25.285180876 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918627 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:07.918618918 +0000 UTC m=+25.285214607 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918608 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918646 4702 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918677 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:07.918670179 +0000 UTC m=+25.285265958 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918529 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918709 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918717 4702 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:05 crc kubenswrapper[4702]: E1125 10:32:05.918740 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:07.918732641 +0000 UTC m=+25.285328420 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.019338 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:32:06 crc kubenswrapper[4702]: E1125 10:32:06.019519 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:32:08.019490293 +0000 UTC m=+25.386086012 (durationBeforeRetry 2s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.401411 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.401480 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.401411 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:06 crc kubenswrapper[4702]: E1125 10:32:06.401557 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:06 crc kubenswrapper[4702]: E1125 10:32:06.401611 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:06 crc kubenswrapper[4702]: E1125 10:32:06.401794 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.456288 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-qdjc6"] Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.456842 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.459467 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.460349 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.460384 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.461457 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.474196 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://062f4a3c43c74a5eb241ac386d12fcdf7bf2bb4e8d569cbc7ae0e08dd4062bb1\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:31:57Z\\\",\\\"message\\\":\\\"W1125 10:31:46.816923 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI1125 
10:31:46.817354 1 crypto.go:601] Generating new CA for check-endpoints-signer@1764066706 cert, and key in /tmp/serving-cert-1911765727/serving-signer.crt, /tmp/serving-cert-1911765727/serving-signer.key\\\\nI1125 10:31:47.121636 1 observer_polling.go:159] Starting file observer\\\\nW1125 10:31:47.124747 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI1125 10:31:47.124937 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI1125 10:31:47.125544 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1911765727/tls.crt::/tmp/serving-cert-1911765727/tls.key\\\\\\\"\\\\nF1125 10:31:57.638797 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": net/http: TLS handshake timeout\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.487524 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.504473 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.513757 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.523673 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.536179 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.547270 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.547536 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.547630 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.548467 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.551800 4702 scope.go:117] "RemoveContainer" 
containerID="499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.551872 4702 generic.go:334] "Generic (PLEG): container finished" podID="89de2be3-64db-4383-951f-0758f58ffccb" containerID="8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8" exitCode=0 Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.551938 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerDied","Data":"8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8"} Nov 25 10:32:06 crc kubenswrapper[4702]: E1125 10:32:06.552291 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.552715 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.564551 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.578256 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.598274 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.609763 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.624926 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ec475b07-7bf8-4c93-bfae-b60284870514-serviceca\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.625015 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq549\" (UniqueName: \"kubernetes.io/projected/ec475b07-7bf8-4c93-bfae-b60284870514-kube-api-access-wq549\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc 
kubenswrapper[4702]: I1125 10:32:06.625226 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec475b07-7bf8-4c93-bfae-b60284870514-host\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.653636 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e4911
7b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.668095 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.683467 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":
\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.698195 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.714865 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.725999 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq549\" (UniqueName: \"kubernetes.io/projected/ec475b07-7bf8-4c93-bfae-b60284870514-kube-api-access-wq549\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.726055 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec475b07-7bf8-4c93-bfae-b60284870514-host\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.726089 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ec475b07-7bf8-4c93-bfae-b60284870514-serviceca\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.726132 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/ec475b07-7bf8-4c93-bfae-b60284870514-host\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.728019 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/ec475b07-7bf8-4c93-bfae-b60284870514-serviceca\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.730639 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.749204 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.750266 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq549\" (UniqueName: \"kubernetes.io/projected/ec475b07-7bf8-4c93-bfae-b60284870514-kube-api-access-wq549\") pod \"node-ca-qdjc6\" (UID: \"ec475b07-7bf8-4c93-bfae-b60284870514\") " pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.760636 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.768793 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qdjc6" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.774569 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: W1125 10:32:06.782154 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podec475b07_7bf8_4c93_bfae_b60284870514.slice/crio-a139a7d325d0318c548f66277010bfa9e793dc786f6933c7473194080165f498 WatchSource:0}: Error finding container a139a7d325d0318c548f66277010bfa9e793dc786f6933c7473194080165f498: Status 404 returned error can't find the container with id a139a7d325d0318c548f66277010bfa9e793dc786f6933c7473194080165f498 Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.792599 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.806669 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.825064 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be 
located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.842616 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.856709 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.876306 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\
\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.891738 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.907760 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:06 crc kubenswrapper[4702]: I1125 10:32:06.988413 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.559690 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.559755 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.559776 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.561774 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerStarted","Data":"a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80"} Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.564182 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553"} Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.565874 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qdjc6" event={"ID":"ec475b07-7bf8-4c93-bfae-b60284870514","Type":"ContainerStarted","Data":"321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd"} Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.565945 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qdjc6" 
event={"ID":"ec475b07-7bf8-4c93-bfae-b60284870514","Type":"ContainerStarted","Data":"a139a7d325d0318c548f66277010bfa9e793dc786f6933c7473194080165f498"} Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.566462 4702 scope.go:117] "RemoveContainer" containerID="499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86" Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.566621 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.580632 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.592558 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.607005 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.624679 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.640378 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.653211 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.677180 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.689942 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.703109 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.715667 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.724933 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.741974 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.761426 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\
\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":f
alse,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.775337 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.792515 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.806962 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.825888 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.841307 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.852843 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.867295 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.881002 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.899091 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.916319 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.940839 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.940890 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.940962 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.941001 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941040 4702 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941089 4702 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941099 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:07 crc 
kubenswrapper[4702]: E1125 10:32:07.941123 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941134 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:11.941105083 +0000 UTC m=+29.307700832 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941139 4702 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941157 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:11.941146774 +0000 UTC m=+29.307742463 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941189 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:11.941168005 +0000 UTC m=+29.307763764 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.940837 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941229 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941276 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941301 4702 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:07 crc kubenswrapper[4702]: E1125 10:32:07.941435 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:11.941406592 +0000 UTC m=+29.308002321 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.955633 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.984606 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:07 crc kubenswrapper[4702]: I1125 10:32:07.999595 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.013474 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.042224 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:32:08 crc kubenswrapper[4702]: E1125 10:32:08.042462 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:32:12.042444002 +0000 UTC m=+29.409039701 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.401772 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.401828 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.401868 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:08 crc kubenswrapper[4702]: E1125 10:32:08.401931 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:08 crc kubenswrapper[4702]: E1125 10:32:08.402015 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:08 crc kubenswrapper[4702]: E1125 10:32:08.402079 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.570944 4702 generic.go:334] "Generic (PLEG): container finished" podID="89de2be3-64db-4383-951f-0758f58ffccb" containerID="a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80" exitCode=0 Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.571017 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerDied","Data":"a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80"} Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.586515 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\
\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.608664 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.627448 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.646468 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release
-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\"
,\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\
\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.657651 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.669068 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.682856 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.700314 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.712526 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.726557 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.738848 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.759752 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.773918 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.793254 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64
b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.820989 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.852616 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.872774 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.889269 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.902218 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.926103 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.937063 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.949082 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.965414 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.978491 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:08 crc kubenswrapper[4702]: I1125 10:32:08.989784 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126
.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.007192 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883
bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\
\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.019616 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.035617 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.204798 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.211068 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.216464 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.228311 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.245797 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.264919 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.286151 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.302926 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.318078 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.331658 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.352084 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":
\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04
Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.363973 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.377655 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.389343 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.404016 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.426513 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"moun
tPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.454136 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.464342 4702 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.466909 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.466952 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.466961 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.467063 4702 kubelet_node_status.go:76] "Attempting to register node" node="crc" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.468443 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.475231 4702 kubelet_node_status.go:115] "Node was previously registered" node="crc" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.476351 4702 kubelet_node_status.go:79] "Successfully registered node" node="crc" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.477762 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.477804 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.477815 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.477830 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.477839 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.487169 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: E1125 10:32:09.501369 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory 
available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\
"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":45063
7738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.506092 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.506154 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.506174 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.506156 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.506200 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.506250 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: E1125 10:32:09.519497 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.522569 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.522601 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.522609 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.522625 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.522635 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: E1125 10:32:09.536470 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.539925 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.539960 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.539973 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.540033 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.540046 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.541888 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: E1125 10:32:09.552698 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\" [... remainder of node-status patch payload identical to the 10:32:09.536470 entry above ...]\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.556175 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.556225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.556241 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.556257 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.556267 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: E1125 10:32:09.570503 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\" [... remainder of node-status patch payload identical to the 10:32:09.536470 entry above ...]\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: E1125 10:32:09.571014 4702 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.572922 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc"
event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.572964 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.572976 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.572993 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.573004 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.575014 4702 generic.go:334] "Generic (PLEG): container finished" podID="89de2be3-64db-4383-951f-0758f58ffccb" containerID="1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac" exitCode=0 Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.575158 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerDied","Data":"1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac"} Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.588354 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.618538 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.660237 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.675701 4702 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.675747 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.675759 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.675778 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.675791 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.701821 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\
\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.744439 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.779828 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.779870 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.779887 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.779929 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.779940 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.786866 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.820008 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.858256 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.882381 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.882425 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.882436 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.882454 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.882466 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.907790 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":
0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termi
nated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.941871 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.985302 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.985362 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.985381 4702 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.985403 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.985417 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:09Z","lastTransitionTime":"2025-11-25T10:32:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:09 crc kubenswrapper[4702]: I1125 10:32:09.986596 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-
25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.026419 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.086613 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.088809 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.088862 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.088884 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.088947 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.088970 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.101003 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\
",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.140253 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-ku
be-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.191772 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.191824 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.191842 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.191862 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.191876 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.209279 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.222582 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.262387 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.295138 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.295194 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.295209 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.295231 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.295246 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.303481 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.341048 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.389552 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"la
stState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",
\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.398308 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.398339 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.398349 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.398365 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.398376 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.401891 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.401892 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:10 crc kubenswrapper[4702]: E1125 10:32:10.402079 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:10 crc kubenswrapper[4702]: E1125 10:32:10.402005 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.402090 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:10 crc kubenswrapper[4702]: E1125 10:32:10.402305 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.419816 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.461723 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/open
shift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.500409 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.500579 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.500603 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.500610 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.500625 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.500652 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.539742 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.581670 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.582109 4702 generic.go:334] "Generic (PLEG): container finished" podID="89de2be3-64db-4383-951f-0758f58ffccb" containerID="e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85" exitCode=0 Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.582214 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerDied","Data":"e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.587764 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.604706 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.604796 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.604821 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.604853 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.604872 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.633777 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.666189 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.707863 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.707924 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.707936 4702 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.707954 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.707966 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.711290 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disa
bled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.749850 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8
s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.783587 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.810752 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.810792 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.810802 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.810815 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.810824 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.820496 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.860649 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.911365 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.915602 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.915638 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.915648 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.915662 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.915674 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:10Z","lastTransitionTime":"2025-11-25T10:32:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.938991 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:10 crc kubenswrapper[4702]: I1125 10:32:10.979722 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\
\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:10Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.017876 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.017937 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.017948 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.017963 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.017972 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.021677 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.065890 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.100575 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.120230 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.120272 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.120284 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.120303 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.120315 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.141589 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.181482 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.223220 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.223267 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.223280 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.223298 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.223309 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.325808 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.325863 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.325875 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.325893 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.325947 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.428662 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.428701 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.428711 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.428725 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.428734 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.532639 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.532710 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.532733 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.532763 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.532786 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.597351 4702 generic.go:334] "Generic (PLEG): container finished" podID="89de2be3-64db-4383-951f-0758f58ffccb" containerID="18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08" exitCode=0 Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.597435 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerDied","Data":"18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.622858 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.635674 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.635749 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.635765 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.635790 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.635819 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.637949 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.654861 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.670354 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.693603 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.706692 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.718972 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.735890 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.738301 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.738344 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.738357 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.738374 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.738386 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.747710 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.760123 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.774646 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:
687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mo
untPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.794546 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.810485 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.823415 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.835989 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.840608 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.840643 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.840653 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.840668 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.840677 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.943249 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.943309 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.943320 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.943338 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:11 crc kubenswrapper[4702]: I1125 10:32:11.943352 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:11Z","lastTransitionTime":"2025-11-25T10:32:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.005130 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.005189 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005330 4702 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005376 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005399 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005414 4702 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005416 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:20.005394471 +0000 UTC m=+37.371990180 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005475 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:20.005461243 +0000 UTC m=+37.372056932 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.005503 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.005540 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005562 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005616 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005625 4702 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005631 4702 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005665 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:20.005658589 +0000 UTC m=+37.372254278 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.005695 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:20.0056757 +0000 UTC m=+37.372271499 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.045273 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.045317 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.045325 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.045341 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.045353 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.106100 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.106520 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:32:20.106491413 +0000 UTC m=+37.473087102 (durationBeforeRetry 8s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.148031 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.148086 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.148099 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.148118 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.148132 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.251889 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.251999 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.252044 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.252070 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.252110 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.359367 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.359435 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.359456 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.359483 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.359500 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.401263 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.401364 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.401364 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.401493 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.401593 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:32:12 crc kubenswrapper[4702]: E1125 10:32:12.401952 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.462828 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.462881 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.462894 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.462934 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.462950 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.565409 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.565445 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.565454 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.565468 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.565476 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.603391 4702 generic.go:334] "Generic (PLEG): container finished" podID="89de2be3-64db-4383-951f-0758f58ffccb" containerID="9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045" exitCode=0
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.603464 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerDied","Data":"9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.608681 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.609007 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.609042 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.621981 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.646305 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.651357 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.655613 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.665772 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.667850 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.667878 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.667889 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.667919 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.667931 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.681028 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.697933 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.716202 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.735074 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.749442 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.763142 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.772942 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.772984 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.773014 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.773030 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.773041 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.778432 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.798750 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z 
is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.812230 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.829119 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.844951 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.858891 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.874190 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.876262 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.876313 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.876328 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.876348 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.876360 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.896207 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\
"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.909496 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.927039 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.942010 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.957535 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.971669 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.984399 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.986577 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.986646 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.986661 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.986679 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.986690 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:12Z","lastTransitionTime":"2025-11-25T10:32:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:12 crc kubenswrapper[4702]: I1125 10:32:12.996690 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:12Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.016791 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.035326 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.050013 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.063703 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.079845 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.095696 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.095751 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.095764 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.095793 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 
25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.095809 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.096612 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.198374 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.198413 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.198429 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.198448 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.198459 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.300788 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.301085 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.301157 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.301230 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.301291 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.403363 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.403608 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.403706 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.403779 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.403845 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.419297 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.437667 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.451365 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.465450 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.481394 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.498930 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.509153 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.509205 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.509214 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.509230 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.509239 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.522333 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.538838 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.554178 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.573963 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bc
df49dade919747bcff14f1b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.587814 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.600520 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.611791 4702 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.611850 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.611868 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.611919 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.611939 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.619999 4702 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.620210 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" event={"ID":"89de2be3-64db-4383-951f-0758f58ffccb","Type":"ContainerStarted","Data":"c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.620429 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.640035 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.653347 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.677301 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.696923 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.714499 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.714526 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.714534 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.714547 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.714555 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.718737 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.740110 4702 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.781560 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1
ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.816543 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.816589 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.816598 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.816613 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 
25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.816623 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.819943 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.859006 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.907434 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bc
df49dade919747bcff14f1b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.919380 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.919879 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.920071 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.920206 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.920326 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:13Z","lastTransitionTime":"2025-11-25T10:32:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.939491 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:13 crc kubenswrapper[4702]: I1125 10:32:13.980706 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.021665 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:14Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.022721 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.022763 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.022777 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.022798 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.022812 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.062809 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:14Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.101515 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:14Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.125177 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.125225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.125238 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.125256 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.125267 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.143742 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:14Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.180527 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:14Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.228006 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.228076 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.228093 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.228119 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.228136 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.330460 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.330509 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.330519 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.330535 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.330546 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.401587 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.401624 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.401599 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:14 crc kubenswrapper[4702]: E1125 10:32:14.401738 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:14 crc kubenswrapper[4702]: E1125 10:32:14.401798 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:14 crc kubenswrapper[4702]: E1125 10:32:14.401849 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.433375 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.433440 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.433449 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.433464 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.433482 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.536070 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.536119 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.536131 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.536148 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.536160 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.624422 4702 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.639161 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.639205 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.639219 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.639239 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.639253 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.741985 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.742040 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.742049 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.742065 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.742079 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.844862 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.844938 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.844952 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.844973 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.844987 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.950979 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.951037 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.951051 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.951069 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:14 crc kubenswrapper[4702]: I1125 10:32:14.951081 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:14Z","lastTransitionTime":"2025-11-25T10:32:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.054736 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.055216 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.055238 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.055266 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.055289 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.159148 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.159206 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.159221 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.159245 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.159261 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.262391 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.262777 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.262867 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.262992 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.263087 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.470356 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.470418 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.470437 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.470462 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.470477 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.573660 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.573701 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.573710 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.573725 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.573733 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.631028 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/0.log"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.635610 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8" exitCode=1
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.635672 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8"}
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.637022 4702 scope.go:117] "RemoveContainer" containerID="bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8"
Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.657094 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\
":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.677519 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.677564 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.677577 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.677594 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.677606 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.683117 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed 
container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.704434 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.726542 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.770598 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bc
df49dade919747bcff14f1b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:15Z\\\",\\\"message\\\":\\\"or removal\\\\nI1125 10:32:14.956944 5969 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:14.956980 5969 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:14.956990 5969 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:14.957040 5969 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:14.957059 5969 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:14.957102 5969 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:14.957157 5969 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 10:32:14.957196 5969 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:14.957190 5969 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 10:32:14.957198 5969 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 10:32:14.957230 5969 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1125 10:32:14.957249 5969 factory.go:656] Stopping watch factory\\\\nI1125 10:32:14.957260 5969 ovnkube.go:599] Stopped ovnkube\\\\nI1125 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d209
9482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.780129 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.780202 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.780213 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.780232 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.780277 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.786269 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.801360 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.818393 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.835020 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.854852 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.871320 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.888430 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.888477 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.888489 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.888507 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.888517 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.889056 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.909274 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.922611 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.940157 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b16
2f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountP
ath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f4584
24045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:15Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.991484 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.991531 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.991542 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.991559 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:15 crc kubenswrapper[4702]: I1125 10:32:15.991570 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:15Z","lastTransitionTime":"2025-11-25T10:32:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.097458 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.097633 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.097946 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.097973 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.097984 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.201640 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.201683 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.201695 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.201714 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.201747 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.210566 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2"] Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.211161 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.213750 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.215098 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.229361 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.241682 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.251023 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hf7fv\" (UniqueName: \"kubernetes.io/projected/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-kube-api-access-hf7fv\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.251062 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.251097 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.251119 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.255269 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.266874 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.282253 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.296374 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.303764 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.303795 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.303803 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.303816 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.303825 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.315054 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\
":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access
-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:15Z\\\",\\\"message\\\":\\\"or removal\\\\nI1125 10:32:14.956944 5969 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:14.956980 5969 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:14.956990 5969 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:14.957040 5969 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:14.957059 5969 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:14.957102 5969 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:14.957157 5969 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 10:32:14.957196 5969 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:14.957190 5969 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 10:32:14.957198 5969 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 10:32:14.957230 5969 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1125 
10:32:14.957249 5969 factory.go:656] Stopping watch factory\\\\nI1125 10:32:14.957260 5969 ovnkube.go:599] Stopped ovnkube\\\\nI1125 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5
f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.327440 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveRea
dOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.343170 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccou
nt\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.352144 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hf7fv\" (UniqueName: \"kubernetes.io/projected/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-kube-api-access-hf7fv\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.352197 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.352238 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.352262 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.352839 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-env-overrides\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.353687 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.357524 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to 
patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-cont
roller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.359230 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.367840 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hf7fv\" (UniqueName: \"kubernetes.io/projected/760a5ac7-2fa3-420b-ae5f-2739a89dd67c-kube-api-access-hf7fv\") pod \"ovnkube-control-plane-749d76644c-shfm2\" (UID: \"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.376243 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.388615 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.401456 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.401483 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:16 crc kubenswrapper[4702]: E1125 10:32:16.401547 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.401601 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.401444 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod 
\"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: E1125 10:32:16.401827 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:16 crc kubenswrapper[4702]: E1125 10:32:16.401760 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.417995 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"
},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"
containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.438032 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.438220 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.438316 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.438421 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.438509 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.441802 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.454209 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:16Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.529085 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.541341 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.541380 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.541388 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.541405 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.541416 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: W1125 10:32:16.586782 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod760a5ac7_2fa3_420b_ae5f_2739a89dd67c.slice/crio-976eb84d0db37761e8968535dfb2e5d66aa575378a22b2f0580c75e0b1842c08 WatchSource:0}: Error finding container 976eb84d0db37761e8968535dfb2e5d66aa575378a22b2f0580c75e0b1842c08: Status 404 returned error can't find the container with id 976eb84d0db37761e8968535dfb2e5d66aa575378a22b2f0580c75e0b1842c08 Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.641296 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" event={"ID":"760a5ac7-2fa3-420b-ae5f-2739a89dd67c","Type":"ContainerStarted","Data":"976eb84d0db37761e8968535dfb2e5d66aa575378a22b2f0580c75e0b1842c08"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.644103 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.644184 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.644208 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.644240 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.644264 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.746522 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.746584 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.746598 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.746663 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.746679 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.848835 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.848882 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.848895 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.848929 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.848943 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.951877 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.951941 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.951953 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.951975 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:16 crc kubenswrapper[4702]: I1125 10:32:16.951984 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:16Z","lastTransitionTime":"2025-11-25T10:32:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.055714 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.055804 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.055823 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.055850 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.055867 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.158022 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.158069 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.158079 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.158096 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.158104 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.260231 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.260279 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.260289 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.260310 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.260319 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.362698 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.363064 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.363079 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.363096 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.363108 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.466336 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.466417 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.466447 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.466467 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.466478 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.568545 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.568587 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.568598 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.568616 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.568630 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.645780 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" event={"ID":"760a5ac7-2fa3-420b-ae5f-2739a89dd67c","Type":"ContainerStarted","Data":"6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.645837 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" event={"ID":"760a5ac7-2fa3-420b-ae5f-2739a89dd67c","Type":"ContainerStarted","Data":"6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.647469 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/0.log" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.649473 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.649621 4702 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.671424 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.671469 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.671479 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.671495 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.671505 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.681722 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.703693 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.711490 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-fnlmg"] Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.712332 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:17 crc kubenswrapper[4702]: E1125 10:32:17.712428 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.725708 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"qua
y.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6
877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.742170 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.760003 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.768306 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.768413 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gchhd\" (UniqueName: \"kubernetes.io/projected/c8f0f344-2768-49e0-a344-81f5b457b671-kube-api-access-gchhd\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.774159 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.774213 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.774225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.774246 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.774257 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.779013 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.797870 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.814854 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready 
status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.832559 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.851510 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.869120 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gchhd\" (UniqueName: \"kubernetes.io/projected/c8f0f344-2768-49e0-a344-81f5b457b671-kube-api-access-gchhd\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.869194 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:17 crc kubenswrapper[4702]: E1125 10:32:17.869374 4702 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:17 crc kubenswrapper[4702]: E1125 10:32:17.869450 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:18.369431604 +0000 UTC m=+35.736027293 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs") pod "network-metrics-daemon-fnlmg" (UID: "c8f0f344-2768-49e0-a344-81f5b457b671") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.869606 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnl
y\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"contai
nerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:15Z\\\",\\\"message\\\":\\\"or removal\\\\nI1125 10:32:14.956944 5969 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:14.956980 5969 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:14.956990 5969 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:14.957040 5969 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:14.957059 5969 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:14.957102 5969 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:14.957157 5969 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 10:32:14.957196 5969 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:14.957190 5969 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 10:32:14.957198 5969 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 10:32:14.957230 5969 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1125 10:32:14.957249 5969 factory.go:656] Stopping watch factory\\\\nI1125 10:32:14.957260 5969 ovnkube.go:599] Stopped ovnkube\\\\nI1125 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.876824 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.876870 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.876880 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.876919 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.876931 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.881586 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.890117 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gchhd\" (UniqueName: \"kubernetes.io/projected/c8f0f344-2768-49e0-a344-81f5b457b671-kube-api-access-gchhd\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.897126 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.911228 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.925408 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.936050 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.958011 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.972839 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.979185 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.979226 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.979236 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.979253 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.979267 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:17Z","lastTransitionTime":"2025-11-25T10:32:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:17 crc kubenswrapper[4702]: I1125 10:32:17.991141 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:17Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.008291 4702 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc
/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.021639 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.034973 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\
\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.049584 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.064494 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.077430 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.082207 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.082272 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.082288 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.082308 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.082320 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.090771 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.105564 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.125389 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984a
f4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:15Z\\\",\\\"message\\\":\\\"or removal\\\\nI1125 10:32:14.956944 5969 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:14.956980 5969 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:14.956990 5969 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:14.957040 5969 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:14.957059 5969 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:14.957102 5969 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:14.957157 5969 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 10:32:14.957196 5969 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:14.957190 5969 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 10:32:14.957198 5969 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 10:32:14.957230 5969 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1125 10:32:14.957249 5969 factory.go:656] Stopping watch factory\\\\nI1125 10:32:14.957260 5969 ovnkube.go:599] Stopped ovnkube\\\\nI1125 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.136796 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.150832 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.162154 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.175407 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.184547 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc 
kubenswrapper[4702]: I1125 10:32:18.184587 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.184597 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.184610 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.184620 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.186814 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.287175 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.287232 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.287247 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.287271 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.287289 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.374767 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:18 crc kubenswrapper[4702]: E1125 10:32:18.374998 4702 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:18 crc kubenswrapper[4702]: E1125 10:32:18.375157 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:19.37512053 +0000 UTC m=+36.741716219 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs") pod "network-metrics-daemon-fnlmg" (UID: "c8f0f344-2768-49e0-a344-81f5b457b671") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.390094 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.390149 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.390161 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.390179 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.390192 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.401364 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.401374 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.401550 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:18 crc kubenswrapper[4702]: E1125 10:32:18.402049 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:18 crc kubenswrapper[4702]: E1125 10:32:18.401817 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:18 crc kubenswrapper[4702]: E1125 10:32:18.402124 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.493210 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.493260 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.493272 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.493288 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.493297 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.595713 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.595759 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.595770 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.595787 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.595801 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.654634 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/1.log" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.655535 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/0.log" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.657597 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff" exitCode=1 Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.657691 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.657749 4702 scope.go:117] "RemoveContainer" containerID="bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.658774 4702 scope.go:117] "RemoveContainer" containerID="158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff" Nov 25 10:32:18 crc kubenswrapper[4702]: E1125 10:32:18.659007 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.673698 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.688193 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.698023 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.698063 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.698072 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.698088 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.698097 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.701161 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.714031 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.727060 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.740957 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.752593 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.763809 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.784102 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984a
f4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:15Z\\\",\\\"message\\\":\\\"or removal\\\\nI1125 10:32:14.956944 5969 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:14.956980 5969 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:14.956990 5969 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:14.957040 5969 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:14.957059 5969 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:14.957102 5969 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:14.957157 5969 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 10:32:14.957196 5969 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:14.957190 5969 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 10:32:14.957198 5969 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 10:32:14.957230 5969 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1125 10:32:14.957249 5969 factory.go:656] Stopping watch factory\\\\nI1125 10:32:14.957260 5969 ovnkube.go:599] Stopped ovnkube\\\\nI1125 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"cont
ainerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.794187 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.800845 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.800892 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.800934 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.800951 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.800963 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.804479 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.813527 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.822072 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.831327 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.851648 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.863441 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.875775 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b16
2f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountP
ath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f4584
24045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.889352 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.901261 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.903525 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.903568 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.903579 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.903599 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.903611 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:18Z","lastTransitionTime":"2025-11-25T10:32:18Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.912937 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.922718 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":
\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.934396 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0
bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.947629 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.960290 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.972220 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:18 crc kubenswrapper[4702]: I1125 10:32:18.986107 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:18Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.006482 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984a
f4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:15Z\\\",\\\"message\\\":\\\"or removal\\\\nI1125 10:32:14.956944 5969 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:14.956980 5969 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:14.956990 5969 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:14.957040 5969 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:14.957059 5969 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:14.957102 5969 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:14.957157 5969 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 10:32:14.957196 5969 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:14.957190 5969 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 10:32:14.957198 5969 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 10:32:14.957230 5969 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1125 10:32:14.957249 5969 factory.go:656] Stopping watch factory\\\\nI1125 10:32:14.957260 5969 ovnkube.go:599] Stopped ovnkube\\\\nI1125 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event 
handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:
//cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.007120 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.007216 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.007301 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.007395 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.007457 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.018281 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.029600 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.041490 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.052677 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.072199 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.084849 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.102542 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b16
2f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountP
ath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f4584
24045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.110094 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.110272 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.110334 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.110406 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.110489 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.212616 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.212655 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.212664 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.212679 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.212690 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.315345 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.315384 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.315396 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.315413 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.315425 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.387828 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:19 crc kubenswrapper[4702]: E1125 10:32:19.388078 4702 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:19 crc kubenswrapper[4702]: E1125 10:32:19.388151 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:21.388131891 +0000 UTC m=+38.754727580 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs") pod "network-metrics-daemon-fnlmg" (UID: "c8f0f344-2768-49e0-a344-81f5b457b671") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.402114 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:19 crc kubenswrapper[4702]: E1125 10:32:19.402249 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.418277 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.418328 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.418339 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.418356 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.418367 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.521460 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.521556 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.521569 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.521592 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.521606 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.623609 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.623656 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.623666 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.623680 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.623689 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.662123 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/1.log" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.726391 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.726429 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.726442 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.726458 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.726473 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.829793 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.829860 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.829874 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.829893 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.829929 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.919603 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.919875 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.920005 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.920107 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.920191 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: E1125 10:32:19.934122 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.938567 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.938774 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.938839 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.938928 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.938992 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: E1125 10:32:19.951080 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.955025 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.955062 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.955075 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.955094 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.955106 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: E1125 10:32:19.968184 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.972449 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.972526 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
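The patch the kubelet keeps retrying above is a strategic merge patch against the node's status subresource: the `$setElementOrder/conditions` directive pins the ordering of the conditions list while each entry in `conditions` is merged by its `type` key. A minimal Go sketch that assembles the same shape of payload (field values abridged; this illustrates the patch format, it is not the kubelet's own code):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// Strategic-merge-patch body for a node status update. The
	// "$setElementOrder/conditions" directive fixes the order of the
	// merged list; each "conditions" entry is matched by its "type" key.
	patch := map[string]any{
		"status": map[string]any{
			"$setElementOrder/conditions": []map[string]string{
				{"type": "MemoryPressure"},
				{"type": "DiskPressure"},
				{"type": "PIDPressure"},
				{"type": "Ready"},
			},
			"conditions": []map[string]string{
				{
					"type":    "Ready",
					"status":  "False",
					"reason":  "KubeletNotReady",
					"message": "container runtime network not ready",
				},
			},
		},
	}
	body, err := json.Marshal(patch)
	if err != nil {
		panic(err)
	}
	// Such a body is PATCHed to the node's status subresource with
	// Content-Type: application/strategic-merge-patch+json.
	fmt.Println(string(body))
}
```

In the failures above the patch itself never gets admitted: the apiserver first calls the validating webhook node.network-node-identity.openshift.io, and that call is what dies on TLS.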
event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.972537 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.972556 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.972567 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:19 crc kubenswrapper[4702]: E1125 10:32:19.984633 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:19Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.996198 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.996235 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.996246 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.996260 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:19 crc kubenswrapper[4702]: I1125 10:32:19.996269 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:19Z","lastTransitionTime":"2025-11-25T10:32:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.037241 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:19Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:20Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.037424 4702 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.039665 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
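Every one of those status-patch retries dies on the same handshake: the webhook endpoint at 127.0.0.1:9743 presents a serving certificate whose NotAfter (2025-08-24T17:21:41Z) is months behind the node clock (2025-11-25). The rejection is the ordinary x509 validity-window comparison. A minimal Go sketch of that check, reading a PEM certificate from a hypothetical path (not a path from this cluster):

```go
package main

import (
	"crypto/x509"
	"encoding/pem"
	"fmt"
	"os"
	"time"
)

func main() {
	// Hypothetical path; substitute the webhook's actual serving cert.
	data, err := os.ReadFile("/tmp/webhook-serving.crt")
	if err != nil {
		panic(err)
	}
	block, _ := pem.Decode(data)
	if block == nil {
		panic("no PEM block found")
	}
	cert, err := x509.ParseCertificate(block.Bytes)
	if err != nil {
		panic(err)
	}
	now := time.Now()
	// The same comparison the TLS verifier performs: the handshake is
	// rejected when now is before NotBefore or after NotAfter.
	switch {
	case now.Before(cert.NotBefore):
		fmt.Printf("certificate is not yet valid: current time %s is before %s\n",
			now.UTC().Format(time.RFC3339), cert.NotBefore.UTC().Format(time.RFC3339))
	case now.After(cert.NotAfter):
		fmt.Printf("certificate has expired: current time %s is after %s\n",
			now.UTC().Format(time.RFC3339), cert.NotAfter.UTC().Format(time.RFC3339))
	default:
		fmt.Println("certificate is within its validity window")
	}
}
```

Run against the webhook's serving certificate, this mirrors the "current time ... is after ..." diagnostic that Go's TLS verifier embeds in the handshake error logged above.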
event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.039705 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.039769 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.039793 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.039805 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.095274 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.095344 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.095383 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095404 4702 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095480 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095496 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095507 4702 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095515 4702 configmap.go:193] 
Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.095415 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095481 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:36.095466323 +0000 UTC m=+53.462062012 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095631 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:36.095611777 +0000 UTC m=+53.462207466 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095634 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095681 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095694 4702 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095649 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:36.095639018 +0000 UTC m=+53.462234697 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.095784 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:36.095763302 +0000 UTC m=+53.462359061 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.142502 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.142553 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.142568 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.142591 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.142608 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.196388 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.196591 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:32:36.196556954 +0000 UTC m=+53.563152653 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.245140 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.245189 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.245200 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.245216 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.245233 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.348119 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.348163 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.348171 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.348187 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.348196 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.402004 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.402077 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.402004 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.402138 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.402206 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:20 crc kubenswrapper[4702]: E1125 10:32:20.402291 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.451112 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.451155 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.451171 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.451193 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.451219 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.553790 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.554089 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.554233 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.554347 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.554448 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.656800 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.656849 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.656860 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.656881 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.656896 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.759040 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.759084 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.759093 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.759109 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.759118 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.862805 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.862872 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.862882 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.862954 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.862968 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.965296 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.965350 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.965363 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.965383 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:20 crc kubenswrapper[4702]: I1125 10:32:20.965396 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:20Z","lastTransitionTime":"2025-11-25T10:32:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.067202 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.067230 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.067238 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.067252 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.067260 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.169729 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.169786 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.169799 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.169819 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.169832 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.271885 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.271965 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.271980 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.271997 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.272008 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.374721 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.374775 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.374787 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.374806 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.374816 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.402423 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:21 crc kubenswrapper[4702]: E1125 10:32:21.402594 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.404879 4702 scope.go:117] "RemoveContainer" containerID="499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.410759 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:21 crc kubenswrapper[4702]: E1125 10:32:21.410937 4702 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:21 crc kubenswrapper[4702]: E1125 10:32:21.411009 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:25.410988656 +0000 UTC m=+42.777584345 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs") pod "network-metrics-daemon-fnlmg" (UID: "c8f0f344-2768-49e0-a344-81f5b457b671") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.476888 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.476970 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.476984 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.477005 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.477016 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.579968 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.580005 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.580015 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.580029 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.580039 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.674124 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.676302 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.676723 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.682167 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.682210 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.682221 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.682240 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.682253 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.691581 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.704615 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.718947 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.742704 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984a
f4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:15Z\\\",\\\"message\\\":\\\"or removal\\\\nI1125 10:32:14.956944 5969 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:14.956980 5969 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:14.956990 5969 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:14.957040 5969 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:14.957059 5969 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:14.957102 5969 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:14.957157 5969 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 10:32:14.957196 5969 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:14.957190 5969 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 10:32:14.957198 5969 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 10:32:14.957230 5969 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1125 10:32:14.957249 5969 factory.go:656] Stopping watch factory\\\\nI1125 10:32:14.957260 5969 ovnkube.go:599] Stopped ovnkube\\\\nI1125 1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event 
handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o:
//cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.754814 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.768005 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.782173 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.784949 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.785190 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.785282 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.785368 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.785454 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.800318 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.813687 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.829111 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.845623 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.862596 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.884011 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch 
status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d3246
27ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb4
84e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.888994 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.889053 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.889068 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.889089 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.889103 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.901686 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.913388 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.927358 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.944744 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.i
o/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:21Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.992266 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.992320 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.992330 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.992350 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:21 crc kubenswrapper[4702]: I1125 10:32:21.992363 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:21Z","lastTransitionTime":"2025-11-25T10:32:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.094822 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.094874 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.094885 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.094914 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.094924 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.197787 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.197850 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.197863 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.197882 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.197930 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.300725 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.300786 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.300799 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.300818 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.300835 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.401501 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.401542 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.401636 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:22 crc kubenswrapper[4702]: E1125 10:32:22.401669 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:22 crc kubenswrapper[4702]: E1125 10:32:22.401791 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:22 crc kubenswrapper[4702]: E1125 10:32:22.401940 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.403146 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.403192 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.403211 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.403235 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.403249 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.505649 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.505692 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.505704 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.505723 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.505733 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.607953 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.607985 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.607993 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.608007 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.608016 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.713876 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.714186 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.714320 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.714455 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.715062 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.818447 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.818495 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.818505 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.818519 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.818528 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.920893 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.920948 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.920959 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.920976 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:22 crc kubenswrapper[4702]: I1125 10:32:22.920986 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:22Z","lastTransitionTime":"2025-11-25T10:32:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.023184 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.023221 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.023251 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.023272 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.023283 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.125214 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.125262 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.125470 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.125491 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.125503 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.228684 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.228721 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.228729 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.228746 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.228755 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.331396 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.331447 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.331476 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.331493 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.331506 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.401617 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:23 crc kubenswrapper[4702]: E1125 10:32:23.401785 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.419803 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.434165 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.434254 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.434272 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.434321 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.434337 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.435182 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.448054 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.460652 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.475496 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.488834 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.505120 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.523003 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.535001 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.536286 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.536327 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.536342 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.536358 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.536370 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.557464 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://bbcb15b2a3894df20637548ab8d3dbb90960e3bcdf49dade919747bcff14f1b8\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:15Z\\\",\\\"message\\\":\\\"or removal\\\\nI1125 10:32:14.956944 5969 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:14.956980 5969 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:14.956990 5969 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:14.957040 5969 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:14.957059 5969 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:14.957102 5969 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:14.957157 5969 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:14.957182 5969 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI1125 10:32:14.957196 5969 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:14.957190 5969 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI1125 10:32:14.957198 5969 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI1125 10:32:14.957230 5969 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI1125 10:32:14.957249 5969 factory.go:656] Stopping watch factory\\\\nI1125 10:32:14.957260 5969 ovnkube.go:599] Stopped ovnkube\\\\nI1125 
1\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.570572 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase
\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.581413 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.593533 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.604626 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.623627 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.636170 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.639698 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.639721 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.639729 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.639743 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.639753 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.649815 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:23Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.742214 4702 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.742252 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.742284 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.742299 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.742310 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.844675 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.844714 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.844727 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.844747 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.844759 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.947174 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.947208 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.947217 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.947230 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:23 crc kubenswrapper[4702]: I1125 10:32:23.947239 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:23Z","lastTransitionTime":"2025-11-25T10:32:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.050465 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.050507 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.050520 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.050537 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.050548 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.153418 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.153469 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.153481 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.153498 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.153509 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.257160 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.257204 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.257217 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.257236 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.257250 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.360406 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.360468 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.360492 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.360522 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.360540 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.401371 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:24 crc kubenswrapper[4702]: E1125 10:32:24.401537 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.401371 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.401380 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:24 crc kubenswrapper[4702]: E1125 10:32:24.401649 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:24 crc kubenswrapper[4702]: E1125 10:32:24.401886 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.463337 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.463478 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.463502 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.463534 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.463555 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.566402 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.566470 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.566483 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.566499 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.566510 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.668882 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.668951 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.668959 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.668976 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.668986 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.771265 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.771309 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.771320 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.771337 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.771349 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.874134 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.874186 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.874198 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.874217 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.874232 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.976916 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.976956 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.976965 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.976980 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:24 crc kubenswrapper[4702]: I1125 10:32:24.976989 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:24Z","lastTransitionTime":"2025-11-25T10:32:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.079497 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.079537 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.079546 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.079559 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.079568 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.182253 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.182301 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.182309 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.182325 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.182334 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.183487 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.185026 4702 scope.go:117] "RemoveContainer" containerID="158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff" Nov 25 10:32:25 crc kubenswrapper[4702]: E1125 10:32:25.185333 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.202752 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.225510 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.260696 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984a
f4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.276356 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.284883 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.284965 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.284985 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.285011 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.285028 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.289845 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.306548 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.323495 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.339948 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.352620 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.365767 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.378434 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.388273 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.388312 4702 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.388322 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.388336 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.388344 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.401810 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:25 crc kubenswrapper[4702]: E1125 10:32:25.401960 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.406623 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir
\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638
a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.424961 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.446988 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.454725 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " 
pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:25 crc kubenswrapper[4702]: E1125 10:32:25.454867 4702 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:25 crc kubenswrapper[4702]: E1125 10:32:25.454962 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:33.454942825 +0000 UTC m=+50.821538534 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs") pod "network-metrics-daemon-fnlmg" (UID: "c8f0f344-2768-49e0-a344-81f5b457b671") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.470497 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.489964 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.491042 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.491088 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.491098 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.491115 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.491127 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.511498 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:25Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.595814 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.595864 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.595876 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.595895 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.595929 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.699686 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.699751 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.699769 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.699800 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.699819 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.803454 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.803512 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.803529 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.803553 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.803568 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.906823 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.906881 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.906891 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.906916 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:25 crc kubenswrapper[4702]: I1125 10:32:25.906927 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:25Z","lastTransitionTime":"2025-11-25T10:32:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.009873 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.009933 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.009945 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.009965 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.009981 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.112560 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.112596 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.112604 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.112619 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.112628 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.215704 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.215781 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.215813 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.215846 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.215870 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.319056 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.319097 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.319106 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.319121 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.319136 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.401420 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.401488 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.401549 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:26 crc kubenswrapper[4702]: E1125 10:32:26.401592 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:26 crc kubenswrapper[4702]: E1125 10:32:26.401753 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:26 crc kubenswrapper[4702]: E1125 10:32:26.401826 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.422182 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.422228 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.422239 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.422256 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.422265 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.524661 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.524740 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.524768 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.524799 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.524820 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.627703 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.627769 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.627786 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.627810 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.627826 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.730087 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.730128 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.730136 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.730151 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.730167 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.833452 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.833523 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.833542 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.833573 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.833592 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.936417 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.936476 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.936488 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.936509 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:26 crc kubenswrapper[4702]: I1125 10:32:26.936523 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:26Z","lastTransitionTime":"2025-11-25T10:32:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.039062 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.039097 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.039106 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.039148 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.039159 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.142115 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.142376 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.142478 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.142544 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.142609 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.245643 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.245709 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.245723 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.245764 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.245782 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.349137 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.349499 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.349605 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.349685 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.349763 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.401275 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:27 crc kubenswrapper[4702]: E1125 10:32:27.401725 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.452392 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.452448 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.452460 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.452478 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.452492 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.555173 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.555225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.555239 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.555262 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.555274 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.659150 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.659196 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.659209 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.659228 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.659240 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.762331 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.762717 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.763035 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.763261 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.763434 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.866837 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.867135 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.867224 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.867353 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.867447 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.970263 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.970310 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.970321 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.970341 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:27 crc kubenswrapper[4702]: I1125 10:32:27.970351 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:27Z","lastTransitionTime":"2025-11-25T10:32:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.074559 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.074630 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.074653 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.074683 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.074705 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.178188 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.178238 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.178249 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.178267 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.178282 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.281178 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.281232 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.281241 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.281259 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.281277 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.383774 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.383856 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.383867 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.383880 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.383890 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.401591 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.401627 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:28 crc kubenswrapper[4702]: E1125 10:32:28.401743 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.401591 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
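[annotation] The records in this capture follow the klog layout visible above: a severity letter fused with the month and day (E1125), a wall-clock timestamp with microseconds, the emitting PID, and the source file and line. A small Go sketch for splitting these fields out when grepping a file like this one; the regular expression and field names are assumptions fitted to the entries shown here, not part of any logging library.

// logparse.go - sketch (assumption: ad-hoc parser for the klog-style
// records in this file, e.g. `E1125 10:32:28.401743 4702 pod_workers.go:1301] "..."`).
package main

import (
	"fmt"
	"regexp"
)

// severity(1) + monthday(4), time, pid, source:line], then the message.
var klogRe = regexp.MustCompile(`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w.]+:\d+)\] (.*)`)

func main() {
	line := `E1125 10:32:28.401743 4702 pod_workers.go:1301] "Error syncing pod, skipping"`
	m := klogRe.FindStringSubmatch(line)
	if m == nil {
		fmt.Println("no match")
		return
	}
	fmt.Printf("severity=%s date=%s time=%s pid=%s source=%s msg=%s\n",
		m[1], m[2], m[3], m[4], m[5], m[6])
}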
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:28 crc kubenswrapper[4702]: E1125 10:32:28.401821 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:28 crc kubenswrapper[4702]: E1125 10:32:28.401888 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.487059 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.487097 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.487106 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.487121 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.487130 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.590402 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.590445 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.590455 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.590474 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.590486 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.693065 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.693113 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.693138 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.693157 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.693166 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.795342 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.795385 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.795399 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.795417 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.795428 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.898614 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.898651 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.898661 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.898678 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:28 crc kubenswrapper[4702]: I1125 10:32:28.898694 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:28Z","lastTransitionTime":"2025-11-25T10:32:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.001234 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.001314 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.001323 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.001374 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.001384 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.105120 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.105161 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.105203 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.105222 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.105233 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.208399 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.208463 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.208474 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.208492 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.208501 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.315772 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.315828 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.315838 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.315856 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.315866 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.402194 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:29 crc kubenswrapper[4702]: E1125 10:32:29.402598 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.419838 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.419956 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.419977 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.420004 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.420020 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
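[annotation] The condition={...} payload printed by setters.go:603 above is the JSON form of the node's Ready condition. A short Go sketch that reproduces that shape; the struct here is a hand-written stand-in mirroring the shape of k8s.io/api/core/v1 NodeCondition, not the imported API type.

// nodecond.go - sketch (assumption: local struct approximating the
// NodeCondition shape seen in the setters.go:603 entries above).
package main

import (
	"encoding/json"
	"fmt"
	"time"
)

type NodeCondition struct {
	Type               string `json:"type"`
	Status             string `json:"status"`
	LastHeartbeatTime  string `json:"lastHeartbeatTime"`
	LastTransitionTime string `json:"lastTransitionTime"`
	Reason             string `json:"reason"`
	Message            string `json:"message"`
}

func main() {
	now := time.Now().UTC().Format(time.RFC3339)
	c := NodeCondition{
		Type:               "Ready",
		Status:             "False",
		LastHeartbeatTime:  now,
		LastTransitionTime: now,
		Reason:             "KubeletNotReady",
		Message:            "container runtime network not ready: NetworkReady=false ...",
	}
	b, _ := json.Marshal(c)
	fmt.Println(string(b)) // {"type":"Ready","status":"False",...}
}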
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.522596 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.522629 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.522638 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.522653 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.522664 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.626264 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.626344 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.626368 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.626398 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.626422 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.728943 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.728990 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.729000 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.729019 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.729032 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.832255 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.832311 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.832329 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.832353 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.832370 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.935942 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.936014 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.936031 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.936059 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:29 crc kubenswrapper[4702]: I1125 10:32:29.936078 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:29Z","lastTransitionTime":"2025-11-25T10:32:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.039127 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.039201 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.039276 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.039399 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.039427 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.141972 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.142028 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.142039 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.142054 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.142062 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.224422 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.224506 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.224525 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.224550 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.224567 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.236992 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:30Z is after 
2025-08-24T17:21:41Z" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.241540 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.241586 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.241595 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.241614 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.241623 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.255831 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:30Z is after 
2025-08-24T17:21:41Z" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.259264 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.259435 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.259503 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.259564 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.259619 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.271961 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:30Z is after 
2025-08-24T17:21:41Z" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.276815 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.276860 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.276873 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.276891 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.276943 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.292244 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:30Z is after 
2025-08-24T17:21:41Z" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.297426 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.297541 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.297557 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.297587 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.297602 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.312257 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:30Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:30Z is after 
2025-08-24T17:21:41Z" Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.312424 4702 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.314008 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.314038 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.314049 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.314067 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.314083 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.401406 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.401587 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.401779 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.401838 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.402153 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:30 crc kubenswrapper[4702]: E1125 10:32:30.402226 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.416867 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.416943 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.416955 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.416979 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.416994 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.519218 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.519277 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.519292 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.519314 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.519330 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.621563 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.621607 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.621616 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.621631 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.621642 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.724655 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.725040 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.725142 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.725240 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.725329 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.827350 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.827595 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.827944 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.828021 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.828077 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.931263 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.931743 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.931931 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.932120 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:30 crc kubenswrapper[4702]: I1125 10:32:30.932264 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:30Z","lastTransitionTime":"2025-11-25T10:32:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.036298 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.036866 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.037079 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.037285 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.037479 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.141622 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.142111 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.142297 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.142432 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.142565 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.214135 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.229493 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.241416 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"ima
geID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-d
ev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.245541 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.245577 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.245591 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.245612 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.245628 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.258940 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.279083 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.293652 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.310895 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.325540 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.348194 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.348242 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.348253 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.348268 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.348329 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984a
f4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.348578 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.360838 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.372046 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.384714 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.396949 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.401410 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:31 crc kubenswrapper[4702]: E1125 10:32:31.401612 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.412778 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.424284 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.434878 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.444687 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.451817 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.451845 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.451856 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.451871 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.451882 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.456865 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" 
for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.466488 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: 
certificate has expired or is not yet valid: current time 2025-11-25T10:32:31Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.554653 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.554688 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.554696 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.554709 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.554718 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.657174 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.657216 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.657225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.657242 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.657257 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.759143 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.759223 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.759240 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.759264 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.759280 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.863230 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.863302 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.863316 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.863342 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.863360 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.965979 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.966027 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.966039 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.966057 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:31 crc kubenswrapper[4702]: I1125 10:32:31.966074 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:31Z","lastTransitionTime":"2025-11-25T10:32:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.069512 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.069596 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.069615 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.069643 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.069664 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.173446 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.173527 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.173544 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.173563 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.173575 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.276951 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.277004 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.277020 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.277044 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.277060 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.381259 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.381321 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.381339 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.381363 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.381382 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.401896 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.402093 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:32 crc kubenswrapper[4702]: E1125 10:32:32.402245 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.402284 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:32 crc kubenswrapper[4702]: E1125 10:32:32.402420 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 10:32:32 crc kubenswrapper[4702]: E1125 10:32:32.402517 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.483757 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.483812 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.483823 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.483842 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.483854 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.586827 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.586868 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.586877 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.586934 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.586950 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.690250 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.690361 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.690380 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.690408 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.690427 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.793140 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.793180 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.793189 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.793204 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.793213 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.895546 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.895612 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.895635 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.895657 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.895700 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.998143 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.998195 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.998215 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.998247 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:32 crc kubenswrapper[4702]: I1125 10:32:32.998265 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:32Z","lastTransitionTime":"2025-11-25T10:32:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.100873 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.100935 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.100946 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.100962 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.100974 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.203407 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.203472 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.203530 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.203575 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.203591 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.306487 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.306544 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.306553 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.306570 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.306581 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.405216 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:32:33 crc kubenswrapper[4702]: E1125 10:32:33.405403 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.415006 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.415035 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.415047 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.415063 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.415075 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.438119 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.449393 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.463349 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.476120 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.487376 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.501393 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\
\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.517078 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.517157 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.517174 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.517194 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.517208 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.521379 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.535439 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z"
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.547662 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:32:33 crc kubenswrapper[4702]: E1125 10:32:33.547994 4702 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 25 10:32:33 crc kubenswrapper[4702]: E1125 10:32:33.548092 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:32:49.548066811 +0000 UTC m=+66.914662500 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs") pod "network-metrics-daemon-fnlmg" (UID: "c8f0f344-2768-49e0-a344-81f5b457b671") : object "openshift-multus"/"metrics-daemon-secret" not registered
Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.551088 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.564926 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.581257 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.611510 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984a
f4a55f01be516c0d8533c0ff\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.619539 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.619573 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.619582 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.619601 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.619612 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.624261 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.641582 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.654821 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.683376 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.703195 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.722755 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.722787 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.722796 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.722809 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.722871 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.729098 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:33Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.825705 4702 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.825768 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.825780 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.825795 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.825807 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.928707 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.928747 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.928756 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.928769 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:33 crc kubenswrapper[4702]: I1125 10:32:33.928779 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:33Z","lastTransitionTime":"2025-11-25T10:32:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.030807 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.030867 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.030878 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.030897 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.030930 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.133548 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.133620 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.133642 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.133667 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.133686 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.236745 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.236789 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.236799 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.236815 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.236825 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.339268 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.339334 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.339351 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.339373 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.339389 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.401375 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.401417 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.401465 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:34 crc kubenswrapper[4702]: E1125 10:32:34.401523 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:34 crc kubenswrapper[4702]: E1125 10:32:34.401611 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:34 crc kubenswrapper[4702]: E1125 10:32:34.401680 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.442501 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.442551 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.442563 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.442579 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.442591 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.545463 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.545541 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.545566 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.545595 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.545612 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.648725 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.648770 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.648782 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.648799 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.648813 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.751562 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.751599 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.751609 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.751622 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.751632 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.866450 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.866528 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.866546 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.866577 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.866596 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.969983 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.970034 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.970043 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.970060 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:34 crc kubenswrapper[4702]: I1125 10:32:34.970070 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:34Z","lastTransitionTime":"2025-11-25T10:32:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.073081 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.073152 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.073176 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.073205 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.073231 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.175991 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.176048 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.176065 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.176085 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.176096 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.278740 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.278780 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.278789 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.278802 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.278811 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.381665 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.381716 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.381726 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.381744 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.381755 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.402011 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:35 crc kubenswrapper[4702]: E1125 10:32:35.402156 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.484851 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.484917 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.484935 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.484950 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.484959 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.587827 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.587867 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.587878 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.587891 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.587919 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.690473 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.690521 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.690532 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.690549 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.690560 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.793090 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.793146 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.793156 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.793172 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.793180 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.895952 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.896007 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.896021 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.896051 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.896075 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.997849 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.998120 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.998134 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.998151 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:35 crc kubenswrapper[4702]: I1125 10:32:35.998161 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:35Z","lastTransitionTime":"2025-11-25T10:32:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.100387 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.100423 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.100434 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.100480 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.100492 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.175129 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.175179 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.175210 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.175229 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175385 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175410 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175423 4702 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175443 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175500 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175490 4702 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175549 4702 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod 
openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175444 4702 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175481 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 10:33:08.175464856 +0000 UTC m=+85.542060545 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175674 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:33:08.175650351 +0000 UTC m=+85.542246090 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175692 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:33:08.175682052 +0000 UTC m=+85.542277831 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.175723 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 10:33:08.175716543 +0000 UTC m=+85.542312322 (durationBeforeRetry 32s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.202822 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.202856 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.202866 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.202883 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.202893 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.276172 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.276429 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:33:08.276359092 +0000 UTC m=+85.642954791 (durationBeforeRetry 32s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.306363 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.306416 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.306431 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.306452 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.306464 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.401988 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.402011 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.402011 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.402308 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.402393 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:36 crc kubenswrapper[4702]: E1125 10:32:36.402150 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.408931 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.408982 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.408998 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.409019 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.409034 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.511174 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.511243 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.511256 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.511289 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.511301 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.614349 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.614403 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.614417 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.614433 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.614445 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.717720 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.717784 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.717796 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.717817 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.717836 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.820029 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.820086 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.820102 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.820162 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.820179 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.922546 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.922599 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.922614 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.922636 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:36 crc kubenswrapper[4702]: I1125 10:32:36.922651 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:36Z","lastTransitionTime":"2025-11-25T10:32:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.024885 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.024977 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.024992 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.025010 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.025023 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.127108 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.127143 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.127152 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.127167 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.127177 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.229357 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.229418 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.229435 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.229458 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.229474 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.333531 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.333608 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.333622 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.333647 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.333660 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.401589 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:37 crc kubenswrapper[4702]: E1125 10:32:37.401775 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.403056 4702 scope.go:117] "RemoveContainer" containerID="158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.435846 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.436352 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.436365 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.436388 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.436401 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.539588 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.539633 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.539663 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.539696 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.539706 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.642485 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.642560 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.642618 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.642641 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.642674 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.745476 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.745539 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.745549 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.745581 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.745591 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.757402 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/1.log" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.759746 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.761122 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.782823 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed 
loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.796970 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.811747 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.819508 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.833973 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.846714 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.848328 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.848349 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.848357 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.848370 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.848380 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.861746 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.876941 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.890935 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.905022 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.919193 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.940785 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0
c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.951442 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.951500 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.951514 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.951533 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.951544 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:37Z","lastTransitionTime":"2025-11-25T10:32:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.955235 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.967232 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.978802 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:37 crc kubenswrapper[4702]: I1125 10:32:37.992064 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:37Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.010359 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.021757 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.038629 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b16
2f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountP
ath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f4584
24045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.052755 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"20
25-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.054131 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.054162 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.054173 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.054190 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.054202 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.069756 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.085508 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.101707 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\
\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.114400 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.127966 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.141509 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.156225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.156268 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.156280 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.156298 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.156310 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.161926 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy 
ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.173884 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.185660 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.197868 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.211653 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.223724 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc 
kubenswrapper[4702]: I1125 10:32:38.233798 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.244640 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.257955 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.258019 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.258037 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.258059 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.258074 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.261026 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"q
uay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readO
nly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\
\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.280400 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3
fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.293192 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.360765 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.360821 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.360834 4702 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.360854 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.360867 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.401577 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.401598 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:38 crc kubenswrapper[4702]: E1125 10:32:38.401733 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:38 crc kubenswrapper[4702]: E1125 10:32:38.401805 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.401619 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:38 crc kubenswrapper[4702]: E1125 10:32:38.401957 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.463406 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.463443 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.463453 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.463472 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.463483 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.565804 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.565845 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.565854 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.565867 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.565875 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.668103 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.668155 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.668166 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.668181 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.668200 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.768637 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/2.log" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.769613 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/1.log" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.770127 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.770162 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.770174 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.770191 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.770204 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.772474 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c" exitCode=1 Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.772518 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.772552 4702 scope.go:117] "RemoveContainer" containerID="158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.773161 4702 scope.go:117] "RemoveContainer" containerID="4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c" Nov 25 10:32:38 crc kubenswrapper[4702]: E1125 10:32:38.773395 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.790266 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-ap
iserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for 
client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc 
kubenswrapper[4702]: I1125 10:32:38.802928 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\
\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.816285 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.828330 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.839653 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-
o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.852153 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.865587 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.876255 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.876293 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.876301 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.876319 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.876328 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.886685 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.898984 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.911198 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.929133 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0
c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://158a04435fb86fdbfc97180b2d0b1d6d1f03984af4a55f01be516c0d8533c0ff\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"message\\\":\\\"eflector.go:311] Stopping reflector *v1.EgressQoS (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressqos/v1/apis/informers/externalversions/factory.go:140\\\\nI1125 10:32:17.791582 6148 reflector.go:311] Stopping reflector *v1.EndpointSlice (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793229 6148 reflector.go:311] Stopping reflector *v1.Service (0s) from k8s.io/client-go/informers/factory.go:160\\\\nI1125 10:32:17.793728 6148 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI1125 10:32:17.793763 6148 handler.go:190] Sending *v1.Pod event handler 3 for removal\\\\nI1125 10:32:17.793768 6148 handler.go:190] Sending *v1.Pod event handler 6 for removal\\\\nI1125 10:32:17.793793 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI1125 10:32:17.793796 6148 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI1125 10:32:17.793821 6148 factory.go:656] Stopping watch factory\\\\nI1125 10:32:17.793836 6148 handler.go:208] Removed *v1.Node event handler 7\\\\nI1125 10:32:17.793845 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI1125 10:32:17.793849 6148 handler.go:208] Removed *v1.Pod event handler 3\\\\nI1125 10:32:17.793867 6148 handler.go:208] Removed *v1.Pod event handler 6\\\\nI1125 10:32:17.793877 6148 handler.go:208] Removed *v1.NetworkPolicy ev\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} 
was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2
d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.940064 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\
":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.950587 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.961205 4702 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has 
expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.970612 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.978701 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:38 crc 
kubenswrapper[4702]: I1125 10:32:38.978762 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.978774 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.978794 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.978808 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:38Z","lastTransitionTime":"2025-11-25T10:32:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:38 crc kubenswrapper[4702]: I1125 10:32:38.989691 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"st
artedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b
038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:38Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.003339 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.019244 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.081073 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.081130 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc 
kubenswrapper[4702]: I1125 10:32:39.081147 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.081171 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.081188 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.183761 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.183803 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.183814 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.183831 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.183842 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.285537 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.285590 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.285599 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.285616 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.285628 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.388343 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.388380 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.388390 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.388403 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.388412 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.402498 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:39 crc kubenswrapper[4702]: E1125 10:32:39.402693 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.490741 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.490803 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.490817 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.490832 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.490843 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.593265 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.593323 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.593332 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.593348 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.593360 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.696614 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.696686 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.696708 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.696737 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.696762 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.779803 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/2.log" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.785314 4702 scope.go:117] "RemoveContainer" containerID="4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c" Nov 25 10:32:39 crc kubenswrapper[4702]: E1125 10:32:39.785475 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.799966 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.800052 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.800071 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.800100 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.800119 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.804439 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.822869 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.838736 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.852666 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.873136 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0
c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.883518 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.893812 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursive
ReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.902742 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.902788 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.902799 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.902813 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.902824 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:39Z","lastTransitionTime":"2025-11-25T10:32:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.906479 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.919373 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.931459 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.944209 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.955829 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.966590 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.978582 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:39 crc kubenswrapper[4702]: I1125 10:32:39.988213 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:39Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.004741 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.004780 4702 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.004792 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.004810 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.004822 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.008250 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:40Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.022283 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:40Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.037451 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:40Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.107788 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.108069 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc 
kubenswrapper[4702]: I1125 10:32:40.108149 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.108225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.108298 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.210362 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.210394 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.210406 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.210421 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.210432 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.313419 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.313727 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.313877 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.314088 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.314318 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.401689 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.401756 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.401776 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.401849 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.401987 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.402123 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.417703 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.417985 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.418064 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.418157 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.418242 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.449477 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.449536 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.449548 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.449570 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.449583 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.467691 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:40Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.473144 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.473258 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.473331 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.473421 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.473517 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.490086 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:40Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.494351 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.494416 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.494430 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.494446 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.494457 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.506237 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:40Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.510058 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.510217 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.510297 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.510427 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.510510 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.527080 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:40Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.530992 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.531047 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
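Every retry above fails identically: the node.network-node-identity.openshift.io webhook at 127.0.0.1:9743 is presenting a serving certificate that expired on 2025-08-24, while the node clock reads 2025-11-25. A minimal Go probe can confirm what the endpoint serves (a diagnostic sketch; only the address is taken from the log, and InsecureSkipVerify is deliberate because a verifying handshake would abort on the expired certificate before its dates could be read):

    package main

    import (
        "crypto/tls"
        "fmt"
        "log"
    )

    func main() {
        // Address taken from the failing webhook call in the log above.
        // InsecureSkipVerify: the handshake would otherwise fail on the
        // expired certificate before we could inspect its dates.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            log.Fatalf("dial: %v", err)
        }
        defer conn.Close()
        for _, cert := range conn.ConnectionState().PeerCertificates {
            fmt.Printf("subject=%q notBefore=%s notAfter=%s\n",
                cert.Subject.String(), cert.NotBefore, cert.NotAfter)
        }
    }

On a CRC guest resumed months after its internal certificates lapsed, the typical remedy is to restart the instance so the cluster can rotate its certificates, rather than patching the webhook by hand.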
event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.531059 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.531079 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.531092 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.546478 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:40Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:40Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:40 crc kubenswrapper[4702]: E1125 10:32:40.546656 4702 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.548919 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
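The block above is the kubelet's bounded status-update retry playing out: each failed PATCH logs "Error updating node status, will retry", and after nodeStatusUpdateRetry attempts (5 in kubelet_node_status.go) the loop gives up with "update node status exceeds retry count". A compressed sketch of that control flow, with the failing PATCH stubbed to return the webhook error seen in the log (the names mirror the kubelet source, but the body is illustrative only):

    package main

    import (
        "errors"
        "fmt"
        "log"
    )

    // nodeStatusUpdateRetry mirrors the kubelet constant governing how many
    // PATCH attempts are made before giving up.
    const nodeStatusUpdateRetry = 5

    // tryUpdateNodeStatus stands in for one status PATCH; here it always
    // fails the way the log shows.
    func tryUpdateNodeStatus() error {
        return errors.New(`failed calling webhook "node.network-node-identity.openshift.io": x509: certificate has expired or is not yet valid`)
    }

    func updateNodeStatus() error {
        for i := 0; i < nodeStatusUpdateRetry; i++ {
            if err := tryUpdateNodeStatus(); err != nil {
                log.Printf("Error updating node status, will retry: %v", err)
                continue
            }
            return nil
        }
        return fmt.Errorf("update node status exceeds retry count")
    }

    func main() {
        if err := updateNodeStatus(); err != nil {
            log.Printf("Unable to update node status: %v", err)
        }
    }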
event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.548959 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.548972 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.548992 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.549010 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.652357 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.652430 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.652447 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.652472 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.652490 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.755147 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.755200 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.755212 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.755228 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.755237 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.858271 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.858328 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.858339 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.858358 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.858371 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.960949 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.960993 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.961002 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.961028 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:40 crc kubenswrapper[4702]: I1125 10:32:40.961041 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:40Z","lastTransitionTime":"2025-11-25T10:32:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.063456 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.063519 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.063536 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.063561 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.063579 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:41Z","lastTransitionTime":"2025-11-25T10:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.166806 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.166867 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.166881 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.166936 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.166953 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:41Z","lastTransitionTime":"2025-11-25T10:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.270225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.270306 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.270332 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.270366 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.270393 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:41Z","lastTransitionTime":"2025-11-25T10:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.374290 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.374352 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.374365 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.374384 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.374396 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:41Z","lastTransitionTime":"2025-11-25T10:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.402064 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:41 crc kubenswrapper[4702]: E1125 10:32:41.402234 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671"
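Interleaved with the webhook failures, the runtime keeps reporting NetworkReady=false because no CNI network definition has been written to /etc/kubernetes/cni/net.d/ yet; on OpenShift that configuration appears only once the cluster network operator's pods start. The readiness test effectively reduces to scanning that directory for a .conf, .conflist, or .json file, roughly as below (a sketch of the libcni file convention; cniConfigPresent is a hypothetical helper, not CRI-O source):

    package main

    import (
        "fmt"
        "os"
        "path/filepath"
    )

    // cniConfigPresent reports whether dir contains a file libcni would load.
    func cniConfigPresent(dir string) (bool, error) {
        entries, err := os.ReadDir(dir)
        if err != nil {
            return false, err
        }
        for _, e := range entries {
            switch filepath.Ext(e.Name()) {
            case ".conf", ".conflist", ".json": // extensions libcni accepts
                return true, nil
            }
        }
        return false, nil
    }

    func main() {
        ok, err := cniConfigPresent("/etc/kubernetes/cni/net.d")
        if err != nil {
            fmt.Println("cannot read CNI conf dir:", err)
            return
        }
        fmt.Println("CNI configuration present:", ok)
    }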
Has your network provider started?"} Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.682250 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.682301 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.682314 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.682332 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.682343 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:41Z","lastTransitionTime":"2025-11-25T10:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.785075 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.785134 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.785146 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.785166 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.785180 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:41Z","lastTransitionTime":"2025-11-25T10:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.887538 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.887576 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.887587 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.887600 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.887609 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:41Z","lastTransitionTime":"2025-11-25T10:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.990313 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.990360 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.990370 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.990385 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:41 crc kubenswrapper[4702]: I1125 10:32:41.990396 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:41Z","lastTransitionTime":"2025-11-25T10:32:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.093074 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.093125 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.093136 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.093153 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.093165 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:42Z","lastTransitionTime":"2025-11-25T10:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.195843 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.195884 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.195896 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.195928 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.195941 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:42Z","lastTransitionTime":"2025-11-25T10:32:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
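Every entry in the run above fails for the same reason: the kubelet finds no CNI configuration under /etc/kubernetes/cni/net.d/ and therefore keeps the node NotReady. A minimal standalone sketch of that check follows; it is not part of this log, the directory path is taken from the messages above, and the accepted file extensions (.conf, .conflist, .json) are an assumption based on common CNI conventions.

    // check-cni-config.go: a sketch (not part of this log) that lists the CNI
    // configuration files the kubelet is complaining about. The path comes from
    // the log above; the extension set is an assumption.
    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	dir := "/etc/kubernetes/cni/net.d"
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		// An unreadable or missing directory matches the
    		// "no CNI configuration file" condition in the log.
    		fmt.Printf("cannot read %s: %v\n", dir, err)
    		os.Exit(1)
    	}
    	found := 0
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			fmt.Println("found CNI config:", filepath.Join(dir, e.Name()))
    			found++
    		}
    	}
    	if found == 0 {
    		fmt.Println("no CNI configuration file found; the network plugin has not written one yet")
    	}
    }

Run on the node itself; an empty result corresponds to the NetworkPluginNotReady condition persisting until the network plugin (here multus/ovn-kubernetes) writes its configuration.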
Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.401747 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.401775 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:42 crc kubenswrapper[4702]: I1125 10:32:42.401769 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:42 crc kubenswrapper[4702]: E1125 10:32:42.401942 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:32:42 crc kubenswrapper[4702]: E1125 10:32:42.402017 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 10:32:42 crc kubenswrapper[4702]: E1125 10:32:42.402272 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[the five-line node-status block repeats at 10:32:42.402 and 10:32:42.505]
[the five-line node-status block repeats at 10:32:42.608, 10:32:42.712, 10:32:42.820, 10:32:42.923, 10:32:43.025, 10:32:43.128, 10:32:43.230 and 10:32:43.333]
Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.401362 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:32:43 crc kubenswrapper[4702]: E1125 10:32:43.401494 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.417051 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"
name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.428160 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.435762 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.435790 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.435801 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.435815 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.435826 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:43Z","lastTransitionTime":"2025-11-25T10:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.437976 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.448117 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.459554 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.469640 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"las
tState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.483245 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.498132 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.510762 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.522646 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.538700 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.538749 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.538760 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.538780 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.538794 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:43Z","lastTransitionTime":"2025-11-25T10:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.541538 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.551387 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.560928 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.571979 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.581295 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.598780 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.611174 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.623202 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b16
2f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountP
ath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f4584
24045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:43Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.641334 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.641400 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.641430 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.641446 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.641456 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:43Z","lastTransitionTime":"2025-11-25T10:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.744491 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.744549 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.744564 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.744584 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:43 crc kubenswrapper[4702]: I1125 10:32:43.744598 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:43Z","lastTransitionTime":"2025-11-25T10:32:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Nov 25 10:32:44 crc kubenswrapper[4702]: I1125 10:32:44.401752 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:44 crc kubenswrapper[4702]: I1125 10:32:44.401828 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:44 crc kubenswrapper[4702]: E1125 10:32:44.402007 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:32:44 crc kubenswrapper[4702]: I1125 10:32:44.402103 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:44 crc kubenswrapper[4702]: E1125 10:32:44.402271 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 10:32:44 crc kubenswrapper[4702]: E1125 10:32:44.402364 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:32:45 crc kubenswrapper[4702]: I1125 10:32:45.401675 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:32:45 crc kubenswrapper[4702]: E1125 10:32:45.401849 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671"
Nov 25 10:32:46 crc kubenswrapper[4702]: I1125 10:32:46.401424 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:46 crc kubenswrapper[4702]: I1125 10:32:46.401511 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:46 crc kubenswrapper[4702]: E1125 10:32:46.401593 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:32:46 crc kubenswrapper[4702]: I1125 10:32:46.401511 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:46 crc kubenswrapper[4702]: E1125 10:32:46.401680 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 10:32:46 crc kubenswrapper[4702]: E1125 10:32:46.401776 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:32:47 crc kubenswrapper[4702]: I1125 10:32:47.401706 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:32:47 crc kubenswrapper[4702]: E1125 10:32:47.401857 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671"
Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.401457 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.401534 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:48 crc kubenswrapper[4702]: E1125 10:32:48.401559 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.401534 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:48 crc kubenswrapper[4702]: E1125 10:32:48.402102 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:32:48 crc kubenswrapper[4702]: E1125 10:32:48.402209 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Has your network provider started?"} Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.682100 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.682136 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.682149 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.682166 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.682177 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:48Z","lastTransitionTime":"2025-11-25T10:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.784427 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.784478 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.784497 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.784513 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.784523 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:48Z","lastTransitionTime":"2025-11-25T10:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.886485 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.886535 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.886546 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.886563 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.886575 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:48Z","lastTransitionTime":"2025-11-25T10:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.988774 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.988821 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.988830 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.988845 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:48 crc kubenswrapper[4702]: I1125 10:32:48.988855 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:48Z","lastTransitionTime":"2025-11-25T10:32:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.091249 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.091281 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.091292 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.091308 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.091319 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.193725 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.193772 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.193786 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.193806 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.193820 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.296519 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.296558 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.296568 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.296582 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.296591 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.399372 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.399419 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.399429 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.399444 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.399454 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.401806 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:49 crc kubenswrapper[4702]: E1125 10:32:49.401954 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.501146 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.501188 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.501205 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.501230 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.501241 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.603209 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.603249 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.603257 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.603275 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.603285 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.620814 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:49 crc kubenswrapper[4702]: E1125 10:32:49.620971 4702 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:49 crc kubenswrapper[4702]: E1125 10:32:49.621035 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:33:21.621018958 +0000 UTC m=+98.987614647 (durationBeforeRetry 32s). 
Nov 25 10:32:49 crc kubenswrapper[4702]: E1125 10:32:49.621035 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:33:21.621018958 +0000 UTC m=+98.987614647 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs") pod "network-metrics-daemon-fnlmg" (UID: "c8f0f344-2768-49e0-a344-81f5b457b671") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.705782 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.705833 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.705845 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.705868 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.705889 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.809086 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.809134 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.809173 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.809195 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.809210 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.912103 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.912143 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.912152 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.912167 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:49 crc kubenswrapper[4702]: I1125 10:32:49.912177 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:49Z","lastTransitionTime":"2025-11-25T10:32:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.014851 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.014885 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.014894 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.014926 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.014936 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.116866 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.116924 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.116936 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.116952 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.116963 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.219350 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.219399 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.219411 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.219433 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.219445 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.321962 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.322013 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.322022 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.322037 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.322046 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.401645 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.401695 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.401808 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.401820 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.401919 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.402000 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.423936 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.423971 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.423979 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.423992 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.424001 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.526324 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.526372 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.526381 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.526396 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.526411 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.628379 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.628420 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.628428 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.628443 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.628452 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.731228 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.731276 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.731286 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.731303 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.731315 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.776685 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.776722 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.776730 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.776743 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.776751 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.788961 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:50Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.794330 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
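Unlike everything before it, the failure above states a definite, non-transient cause: node-status patches go through the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z, long before the node's current clock time, so every patch is rejected with an x509 error. A short Go sketch for confirming the certificate's validity window from the host (verification is deliberately skipped so the expired chain can be inspected; never do this for real traffic):

    package main

    import (
        "crypto/tls"
        "fmt"
        "time"
    )

    func main() {
        // Endpoint taken from the webhook error above.
        conn, err := tls.Dial("tcp", "127.0.0.1:9743",
            &tls.Config{InsecureSkipVerify: true})
        if err != nil {
            fmt.Println("dial failed:", err)
            return
        }
        defer conn.Close()

        cert := conn.ConnectionState().PeerCertificates[0]
        fmt.Println("subject:  ", cert.Subject)
        fmt.Println("notBefore:", cert.NotBefore.Format(time.RFC3339))
        fmt.Println("notAfter: ", cert.NotAfter.Format(time.RFC3339))
        if time.Now().After(cert.NotAfter) {
            // Matches the log: 2025-11-25T10:32:50Z is after 2025-08-24T17:21:41Z,
            // so TLS verification fails and node-status patches are rejected.
            fmt.Println("serving certificate expired; webhook calls will fail")
        }
    }

The same dates can be read with openssl s_client against that endpoint; the likely remedy is rotating the webhook's serving certificate rather than anything on the kubelet side.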
event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.794389 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.794413 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.794424 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.807864 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:50Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.812089 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.812136 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.812144 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.812160 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.812172 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.827507 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:50Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.830738 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.830771 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.830781 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.830796 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.830809 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.843450 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:50Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.846881 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.846927 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.846936 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.846950 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.846961 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.857828 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:50Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:50Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:50 crc kubenswrapper[4702]: E1125 10:32:50.858017 4702 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.859535 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.859572 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.859583 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.859602 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.859614 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.962116 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.962164 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.962175 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.962192 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:50 crc kubenswrapper[4702]: I1125 10:32:50.962203 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:50Z","lastTransitionTime":"2025-11-25T10:32:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.065022 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.065075 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.065093 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.065119 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.065139 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.167301 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.167344 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.167362 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.167380 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.167391 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.269980 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.270016 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.270026 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.270069 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.270081 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.372603 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.372647 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.372658 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.372674 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.372685 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.401312 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:51 crc kubenswrapper[4702]: E1125 10:32:51.401460 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.475092 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.475381 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.475480 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.475578 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.475671 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.579021 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.579073 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.579084 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.579103 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.579114 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.681025 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.681065 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.681076 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.681093 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.681105 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.783312 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.783348 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.783357 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.783371 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.783380 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.886364 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.886410 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.886420 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.886437 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.886447 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.988568 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.988613 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.988623 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.988640 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:51 crc kubenswrapper[4702]: I1125 10:32:51.988649 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:51Z","lastTransitionTime":"2025-11-25T10:32:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.091670 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.091717 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.091726 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.091741 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.091752 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.194176 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.194225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.194236 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.194255 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.194265 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.296238 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.296294 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.296304 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.296316 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.296324 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.398694 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.398741 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.398760 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.398775 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.398786 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.402071 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:32:52 crc kubenswrapper[4702]: E1125 10:32:52.402210 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.402324 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:32:52 crc kubenswrapper[4702]: E1125 10:32:52.402520 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.402755 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:32:52 crc kubenswrapper[4702]: E1125 10:32:52.402810 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.501524 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.501588 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.501604 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.501623 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.501634 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.603508 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.603556 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.603569 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.603587 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.603600 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.706538 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.706583 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.706595 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.706611 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.706623 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.808853 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.808891 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.808968 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.808985 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.808996 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.911029 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.911060 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.911069 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.911082 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:52 crc kubenswrapper[4702]: I1125 10:32:52.911091 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:52Z","lastTransitionTime":"2025-11-25T10:32:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.013024 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.013064 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.013075 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.013090 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.013103 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.115356 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.115410 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.115422 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.115439 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.115449 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.218930 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.218970 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.218979 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.219015 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.219028 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.322168 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.322212 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.322247 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.322266 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.322276 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.402164 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:32:53 crc kubenswrapper[4702]: E1125 10:32:53.402312 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.402820 4702 scope.go:117] "RemoveContainer" containerID="4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c" Nov 25 10:32:53 crc kubenswrapper[4702]: E1125 10:32:53.402981 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.415304 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.424305 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.424403 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.424416 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.424465 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.424480 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.428240 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea1
77225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.439474 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"res
tartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.452464 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.464232 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.475952 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.488343 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.505295 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0
c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.520307 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\
"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.526620 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.526654 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.526664 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.526680 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.526691 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.533087 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.543820 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc 
kubenswrapper[4702]: I1125 10:32:53.595076 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\
\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Co
mpleted\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.612755 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.629033 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.629077 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.629091 4702 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.629133 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.629144 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.629340 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.643822 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.
io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.655823 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.668465 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.682234 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mo
untPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.732265 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.732323 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.732335 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.732353 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.732365 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.830236 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/0.log" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.830285 4702 generic.go:334] "Generic (PLEG): container finished" podID="fc7bcda9-5809-4852-8dd7-414ead106d61" containerID="9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94" exitCode=1 Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.830314 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dxlxj" event={"ID":"fc7bcda9-5809-4852-8dd7-414ead106d61","Type":"ContainerDied","Data":"9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94"} Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.830665 4702 scope.go:117] "RemoveContainer" containerID="9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.834585 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.834621 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.834632 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.834656 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.834668 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.844543 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.858302 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for 
pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.871680 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.884848 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.898311 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.920570 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0
c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.935778 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.937168 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.937205 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.937218 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.937234 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.937245 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:53Z","lastTransitionTime":"2025-11-25T10:32:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.951346 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.964043 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.977722 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:53 crc kubenswrapper[4702]: I1125 10:32:53.988194 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:53Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.015061 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.029388 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.039591 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.039638 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.039650 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.039665 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.039679 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.046087 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\
\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"
mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.061961 4702 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\
\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.075519 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.088519 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.102755 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"2025-11-25T10:32:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def\\\\n2025-11-25T10:32:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def to /host/opt/cni/bin/\\\\n2025-11-25T10:32:08Z [verbose] multus-daemon started\\\\n2025-11-25T10:32:08Z [verbose] Readiness Indicator file check\\\\n2025-11-25T10:32:53Z [error] have you checked that your 
default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.141669 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.141717 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.141726 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.141740 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.141750 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.244615 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.244654 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.244663 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.244677 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.244687 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.347436 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.347494 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.347505 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.347520 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.347529 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.401451 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.401557 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:54 crc kubenswrapper[4702]: E1125 10:32:54.401590 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:54 crc kubenswrapper[4702]: E1125 10:32:54.401705 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.401953 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:54 crc kubenswrapper[4702]: E1125 10:32:54.402104 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.449563 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.449856 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.449984 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.450107 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.450190 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.553010 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.553062 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.553077 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.553097 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.553112 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.655871 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.655928 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.655937 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.655951 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.655959 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.758833 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.758895 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.758923 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.758941 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.758953 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.836518 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/0.log" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.836586 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dxlxj" event={"ID":"fc7bcda9-5809-4852-8dd7-414ead106d61","Type":"ContainerStarted","Data":"9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.849716 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-relea
se-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.863601 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.863666 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.863679 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.863697 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.863710 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.866697 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.884761 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"2025-11-25T10:32:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def\\\\n2025-11-25T10:32:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def to /host/opt/cni/bin/\\\\n2025-11-25T10:32:08Z [verbose] multus-daemon started\\\\n2025-11-25T10:32:08Z [verbose] Readiness Indicator file check\\\\n2025-11-25T10:32:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.898353 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.910810 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.926155 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod 
\"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.941334 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.964080 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0
c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.966071 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.966111 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.966125 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.966143 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.966154 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:54Z","lastTransitionTime":"2025-11-25T10:32:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.977703 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:54 crc kubenswrapper[4702]: I1125 10:32:54.990309 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:54Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.003334 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:55Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.015446 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:55Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.027465 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:55Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:55 crc 
kubenswrapper[4702]: I1125 10:32:55.037947 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:55Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.050852 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:55Z is after 2025-08-24T17:21:41Z" Nov 25 
10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.067465 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:55Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.068658 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 
10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.068706 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.068717 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.068732 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.068743 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.091416 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"runnin
g\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3
fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:55Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.104869 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:32:55Z is after 2025-08-24T17:21:41Z" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.172145 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.172209 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.172223 4702 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.172244 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.172260 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.274868 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.274934 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.274946 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.274963 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.274976 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.377767 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.377819 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.377831 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.377857 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.377869 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.401398 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:55 crc kubenswrapper[4702]: E1125 10:32:55.401560 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.479642 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.479673 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.479682 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.479696 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.479705 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.582465 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.582537 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.582559 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.582591 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.582613 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.685694 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.685759 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.685776 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.685803 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.685822 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.789287 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.789354 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.789368 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.789387 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.789397 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.891485 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.891554 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.891568 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.891588 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.891626 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.993290 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.993326 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.993334 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.993348 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:55 crc kubenswrapper[4702]: I1125 10:32:55.993357 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:55Z","lastTransitionTime":"2025-11-25T10:32:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.095379 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.095422 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.095434 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.095454 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.095467 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.197164 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.197217 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.197227 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.197244 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.197252 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.300317 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.300354 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.300366 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.300383 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.300394 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.401276 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.401306 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.401462 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:56 crc kubenswrapper[4702]: E1125 10:32:56.401462 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:56 crc kubenswrapper[4702]: E1125 10:32:56.401861 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:56 crc kubenswrapper[4702]: E1125 10:32:56.402149 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.402660 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.402683 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.402712 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.402727 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.402738 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.506138 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.506177 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.506186 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.506217 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.506227 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.608992 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.609040 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.609051 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.609069 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.609081 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.711724 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.711759 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.711768 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.711784 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.711793 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.814302 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.814344 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.814353 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.814368 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.814378 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.917420 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.917455 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.917467 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.917483 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:56 crc kubenswrapper[4702]: I1125 10:32:56.917494 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:56Z","lastTransitionTime":"2025-11-25T10:32:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.019979 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.020018 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.020027 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.020043 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.020053 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.122414 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.122456 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.122468 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.122483 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.122496 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.224657 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.224706 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.224718 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.224734 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.224745 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.327237 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.327305 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.327319 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.327338 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.327350 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.401975 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:57 crc kubenswrapper[4702]: E1125 10:32:57.402126 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.428978 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.429018 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.429026 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.429040 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.429049 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.530888 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.530963 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.530977 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.530993 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.531004 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.634121 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.634185 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.634202 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.634227 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.634253 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.737341 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.737379 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.737390 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.737406 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.737424 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.839486 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.839530 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.839544 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.839560 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.839571 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.942559 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.942623 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.942639 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.942666 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:57 crc kubenswrapper[4702]: I1125 10:32:57.942679 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:57Z","lastTransitionTime":"2025-11-25T10:32:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.045333 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.045377 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.045391 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.045408 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.045422 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.148536 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.148581 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.148593 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.148610 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.148624 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.252257 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.252310 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.252329 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.252348 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.252361 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.355564 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.355616 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.355632 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.355655 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.355671 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.402195 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:32:58 crc kubenswrapper[4702]: E1125 10:32:58.402356 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.402214 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:32:58 crc kubenswrapper[4702]: E1125 10:32:58.402439 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.402195 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:32:58 crc kubenswrapper[4702]: E1125 10:32:58.402503 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.458083 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.458160 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.458189 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.458225 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.458249 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.561011 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.561327 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.561496 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.561530 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.561555 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.665512 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.665558 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.665567 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.665583 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.665593 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.768976 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.769298 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.769578 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.769785 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.770057 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.873609 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.873660 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.873675 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.873692 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.873707 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.976543 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.976577 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.976585 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.976597 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:58 crc kubenswrapper[4702]: I1125 10:32:58.976606 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:58Z","lastTransitionTime":"2025-11-25T10:32:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.079052 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.079346 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.079474 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.079599 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.079711 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.183720 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.183783 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.183800 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.183823 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.183841 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.287121 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.287228 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.287266 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.287288 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.287300 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.390306 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.390342 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.390353 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.390371 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.390383 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.402264 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:32:59 crc kubenswrapper[4702]: E1125 10:32:59.402446 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.492759 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.493121 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.493213 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.493320 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.493413 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.595889 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.595935 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.595943 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.595956 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.595964 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.698810 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.698856 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.698870 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.698886 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.698929 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.801241 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.801271 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.801279 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.801294 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.801302 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.904370 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.904429 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.904446 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.904469 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:32:59 crc kubenswrapper[4702]: I1125 10:32:59.904487 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:32:59Z","lastTransitionTime":"2025-11-25T10:32:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.007750 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.007820 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.007844 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.007873 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.007894 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.112370 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.112416 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.112429 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.112449 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.112462 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.215177 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.215239 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.215261 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.215289 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.215311 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.319001 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.319043 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.319057 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.319077 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.319091 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.401476 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:00 crc kubenswrapper[4702]: E1125 10:33:00.401598 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.401679 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:00 crc kubenswrapper[4702]: E1125 10:33:00.401992 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.402148 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:00 crc kubenswrapper[4702]: E1125 10:33:00.402400 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.421947 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.421997 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.422008 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.422028 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.422041 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.525376 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.525410 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.525419 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.525433 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.525442 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.628300 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.628364 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.628385 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.628410 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.628429 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.731395 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.731720 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.731990 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.732209 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.732483 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.835824 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.836286 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.836446 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.836749 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.836961 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.940191 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.940272 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.940300 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.940331 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:00 crc kubenswrapper[4702]: I1125 10:33:00.940353 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:00Z","lastTransitionTime":"2025-11-25T10:33:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.043085 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.043137 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.043148 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.043167 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.043181 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.146723 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.146773 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.146783 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.146800 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.146813 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.153194 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.153230 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.153241 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.153258 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.153267 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: E1125 10:33:01.167391 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:01Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.171755 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.171797 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.171808 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.171826 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.171840 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: E1125 10:33:01.185099 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:01Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.191067 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.191204 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.191222 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.191247 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.191264 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: E1125 10:33:01.211325 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:01Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.215589 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.216037 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.216109 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.216211 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.216273 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: E1125 10:33:01.228432 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:01Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.231960 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.232127 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
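What the kubelet keeps resending here is a strategic-merge patch against the Node object's .status: $setElementOrder/conditions pins the ordering of the merged condition list, conditions carries the fresh heartbeats, and the images inventory accounts for almost all of the payload's bulk (it is byte-for-byte identical on every retry, which is why the later copies are elided above). A minimal sketch of the payload's shape, using a trimmed, hypothetical copy of the logged values:

# Trimmed, hypothetical copy of the node-status patch quoted in the entries
# above; the real payload also carries the allocatable/capacity block and the
# full per-node image inventory that dominates its size.
patch = {
    "status": {
        "$setElementOrder/conditions": [
            {"type": "MemoryPressure"},
            {"type": "DiskPressure"},
            {"type": "PIDPressure"},
            {"type": "Ready"},
        ],
        "conditions": [
            {
                "type": "Ready",
                "status": "False",
                "reason": "KubeletNotReady",
                "message": "container runtime network not ready: ...",
            },
        ],
    }
}

# "$setElementOrder/conditions" is strategic-merge-patch bookkeeping: it pins
# the order of the merged list without restating every element's fields.
for cond in patch["status"]["conditions"]:
    print(f'{cond["type"]}: status={cond["status"]} reason={cond["reason"]}')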
event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.232211 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.232292 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.232371 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: E1125 10:33:01.245088 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:01Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:01 crc kubenswrapper[4702]: E1125 10:33:01.245311 4702 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.249467 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
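Every attempt dies in the same place: the status PATCH is routed through the node.network-node-identity.openshift.io validating webhook at https://127.0.0.1:9743, whose serving certificate expired on 2025-08-24T17:21:41Z, so the kubelet can never land the update no matter how often it retries. A minimal sketch for confirming the expiry from the node itself, assuming Python with the cryptography package (>= 42 for the *_utc accessors) is available and the port is reachable:

import datetime
import socket
import ssl

from cryptography import x509

HOST, PORT = "127.0.0.1", 9743  # webhook endpoint taken from the log entries above

# Disable verification: we want to inspect the certificate even though it is
# invalid (verification is exactly what is failing in the log).
ctx = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

with socket.create_connection((HOST, PORT), timeout=5) as sock:
    with ctx.wrap_socket(sock, server_hostname=HOST) as tls:
        der = tls.getpeercert(binary_form=True)  # raw DER, returned unvalidated

cert = x509.load_der_x509_certificate(der)
now = datetime.datetime.now(datetime.timezone.utc)
print("notAfter:", cert.not_valid_after_utc)
print("expired:", cert.not_valid_after_utc < now)

If the notAfter printed here matches the 2025-08-24T17:21:41Z in the entries above, the fix is rotating the webhook's serving certificate, not anything on the kubelet side.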
event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.249580 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.249659 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.249748 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.249841 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.352338 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.352376 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.352386 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.352400 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.352409 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.401606 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:01 crc kubenswrapper[4702]: E1125 10:33:01.402074 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.455135 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.455178 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.455209 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.455229 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.455239 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.557296 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.557594 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.557683 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.557760 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:01 crc kubenswrapper[4702]: I1125 10:33:01.557830 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:01Z","lastTransitionTime":"2025-11-25T10:33:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
[... the heartbeat sequence repeats verbatim at 10:33:01.664175-10:33:01.664322, 10:33:01.769156-10:33:01.769438, 10:33:01.874014-10:33:01.874163, 10:33:01.977593-10:33:01.977748, 10:33:02.080556-10:33:02.080682, 10:33:02.183603-10:33:02.183739, 10:33:02.286244-10:33:02.286322 and 10:33:02.388710-10:33:02.388781 ...]
Nov 25 10:33:02 crc kubenswrapper[4702]: I1125 10:33:02.401522 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:33:02 crc kubenswrapper[4702]: I1125 10:33:02.401543 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:33:02 crc kubenswrapper[4702]: I1125 10:33:02.401552 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:33:02 crc kubenswrapper[4702]: E1125 10:33:02.401669 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Nov 25 10:33:02 crc kubenswrapper[4702]: E1125 10:33:02.401771 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:33:02 crc kubenswrapper[4702]: E1125 10:33:02.401873 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
[... the heartbeat sequence repeats verbatim at 10:33:02.490957-10:33:02.491042, 10:33:02.593588-10:33:02.593699, 10:33:02.696715-10:33:02.696834 and 10:33:02.799556-10:33:02.799666 ...]
Nov 25 10:33:02 crc kubenswrapper[4702]: I1125 10:33:02.901405 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:33:02 crc kubenswrapper[4702]: I1125 10:33:02.901451 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:33:02 crc kubenswrapper[4702]: I1125 10:33:02.901462 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:33:02 crc kubenswrapper[4702]: I1125 10:33:02.901480 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:33:02 crc kubenswrapper[4702]: I1125 10:33:02.901492 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:02Z","lastTransitionTime":"2025-11-25T10:33:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.004234 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.004302 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.004312 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.004341 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.004351 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.108099 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.108160 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.108171 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.108189 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.108200 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.210677 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.210725 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.210742 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.210765 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.210777 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.313468 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.313509 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.313519 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.313548 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.313558 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.402344 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:03 crc kubenswrapper[4702]: E1125 10:33:03.402490 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.416834 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.416882 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.416893 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.416938 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.416951 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.423753 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\
":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finished
At\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.440225 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.455643 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.468566 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.479159 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.492182 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.506216 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"2025-11-25T10:32:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def\\\\n2025-11-25T10:32:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def to /host/opt/cni/bin/\\\\n2025-11-25T10:32:08Z [verbose] multus-daemon started\\\\n2025-11-25T10:32:08Z [verbose] Readiness Indicator file check\\\\n2025-11-25T10:32:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.518237 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.518267 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.518276 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.518289 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.518298 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.524446 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/s
ecrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\
\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: 
fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveR
eadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.534634 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.545818 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.557511 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.569944 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.579891 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.589993 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.603154 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.613581 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.621083 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.621118 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.621127 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.621143 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.621152 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.623759 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":t
rue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.634248 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:03Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.723803 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.723840 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.723851 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.723866 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.723877 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.826324 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.826540 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.826630 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.826703 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.826764 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.929363 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.929403 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.929433 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.929453 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:03 crc kubenswrapper[4702]: I1125 10:33:03.929465 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:03Z","lastTransitionTime":"2025-11-25T10:33:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.032734 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.032780 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.032790 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.032807 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.032818 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.135122 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.135153 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.135160 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.135173 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.135183 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.238055 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.238092 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.238104 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.238121 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.238132 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.340470 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.340529 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.340545 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.340565 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.340577 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.401525 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.401565 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.401634 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:04 crc kubenswrapper[4702]: E1125 10:33:04.401740 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:04 crc kubenswrapper[4702]: E1125 10:33:04.401827 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:04 crc kubenswrapper[4702]: E1125 10:33:04.401921 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.443557 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.443610 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.443619 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.443634 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.443645 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.545777 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.545827 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.545836 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.545854 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.545863 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.649764 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.649815 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.649827 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.649847 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.649859 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.753183 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.753232 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.753242 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.753261 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.753270 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.855132 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.855182 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.855195 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.855215 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.855227 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.958033 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.958105 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.958133 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.958162 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:04 crc kubenswrapper[4702]: I1125 10:33:04.958182 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:04Z","lastTransitionTime":"2025-11-25T10:33:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.062514 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.062574 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.062588 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.062607 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.062623 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.165083 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.165120 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.165128 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.165141 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.165151 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.268061 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.268106 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.268115 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.268132 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.268141 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.370875 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.370971 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.370989 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.371015 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.371035 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.401542 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:05 crc kubenswrapper[4702]: E1125 10:33:05.402121 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.402254 4702 scope.go:117] "RemoveContainer" containerID="4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.474876 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.474937 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.474950 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.474966 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.474977 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.576667 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.576699 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.576708 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.576722 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.576733 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.680091 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.680130 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.680139 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.680155 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.680165 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.783456 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.783749 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.784047 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.784220 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.784391 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.887261 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.887309 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.887318 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.887333 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.887344 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.992587 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.992956 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.992974 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.992997 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:05 crc kubenswrapper[4702]: I1125 10:33:05.993015 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:05Z","lastTransitionTime":"2025-11-25T10:33:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.096803 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.096930 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.096950 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.096979 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.097000 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.200009 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.200069 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.200116 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.200142 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.200154 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.302117 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.302175 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.302186 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.302201 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.302213 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.401862 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.401974 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.402042 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:06 crc kubenswrapper[4702]: E1125 10:33:06.402352 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:06 crc kubenswrapper[4702]: E1125 10:33:06.402433 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:06 crc kubenswrapper[4702]: E1125 10:33:06.402542 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.405076 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.405140 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.405164 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.405186 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.405200 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.413218 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.507226 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.507265 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.507276 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.507292 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.507303 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.609369 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.609399 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.609409 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.609423 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.609434 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.711766 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.711818 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.711826 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.711841 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.711851 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.815248 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.815304 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.815323 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.815346 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.815362 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.877654 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/2.log" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.880843 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.881652 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.892887 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"101237fc-f099-40a3-94f0-8985c04bcbaf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404f34fc23977c4fbd704b63606be47b6607d02d850eb2cf09280abedf2afc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}
},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.911536 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluste
r-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 
cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.923624 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.923664 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.923677 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.923694 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.923708 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:06Z","lastTransitionTime":"2025-11-25T10:33:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.927844 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.939998 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.956810 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"2025-11-25T10:32:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def\\\\n2025-11-25T10:32:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def to /host/opt/cni/bin/\\\\n2025-11-25T10:32:08Z [verbose] multus-daemon started\\\\n2025-11-25T10:32:08Z [verbose] Readiness Indicator file check\\\\n2025-11-25T10:32:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.971353 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:06 crc kubenswrapper[4702]: I1125 10:33:06.988015 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:06Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.003620 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.017127 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.026219 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.026439 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.026522 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.026595 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.026659 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.029764 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.041614 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.062233 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4c7519258f9f6306dd1500e9e471e2711bd38d
91bd8e2f567dbe389736118c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: 
fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:33:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.075656 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.087274 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.099413 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 
10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.111371 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.129127 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.129346 4702 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.129439 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.129512 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.129573 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.131048 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.141926 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.156035 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.231712 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.231781 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc 
kubenswrapper[4702]: I1125 10:33:07.231799 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.231822 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.231840 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.334343 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.334409 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.334424 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.334441 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.334453 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.402372 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:07 crc kubenswrapper[4702]: E1125 10:33:07.402583 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.438402 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.438469 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.438482 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.438508 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.438528 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.541654 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.542027 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.542123 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.542237 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.542323 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.645871 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.646394 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.646479 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.646550 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.646621 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.749972 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.750018 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.750029 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.750046 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.750057 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.852560 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.852606 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.852616 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.852634 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.852650 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.887629 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/3.log" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.889236 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/2.log" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.892513 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" exitCode=1 Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.892570 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.892609 4702 scope.go:117] "RemoveContainer" containerID="4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.893392 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:33:07 crc kubenswrapper[4702]: E1125 10:33:07.893597 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.908858 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.924626 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to 
verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.944080 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.955773 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.955842 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.955861 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.955893 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.955947 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:07Z","lastTransitionTime":"2025-11-25T10:33:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.964372 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:07 crc kubenswrapper[4702]: I1125 10:33:07.978195 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:07Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.008869 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4c7519258f9f6306dd1500e9e471e2711bd38d
91bd8e2f567dbe389736118c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://4de569abc3dce00005f921c7681731cbe35798c0c0b6266a779703584f1dce3c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:38Z\\\",\\\"message\\\":\\\"04393 6415 ovn.go:134] Ensuring zone local for Pod openshift-network-console/networking-console-plugin-85b44fc459-gdk6g in node crc\\\\nI1125 10:32:38.204407 6415 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI1125 10:32:38.204419 6415 base_network_controller_pods.go:477] [default/openshift-network-console/networking-console-plugin-85b44fc459-gdk6g] creating logical port openshift-network-console_networking-console-plugin-85b44fc459-gdk6g for pod on switch crc\\\\nI1125 10:32:38.204444 6415 obj_retry.go:303] Retry object setup: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204459 6415 obj_retry.go:365] Adding new object: *v1.Pod openshift-multus/network-metrics-daemon-fnlmg\\\\nI1125 10:32:38.204466 6415 ovn.go:134] Ensuring zone local for Pod openshift-multus/network-metrics-daemon-fnlmg in node crc\\\\nF1125 10:32:38.204482 6415 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:37Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:33:07Z\\\",\\\"message\\\":\\\"ster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1125 10:33:07.074064 6797 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nF1125 10:33:07.074070 6797 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for namespace Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 
0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:33:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.1
1\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.023397 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.039566 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.054979 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.059856 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.059955 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.059968 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.059991 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.060004 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.076156 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.094603 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": 
Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.129304 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702
f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\
\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.148723 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.163822 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.163933 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.163955 4702 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.164237 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.164258 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.172760 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d
0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Runnin
g\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.188140 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"101237fc-f099-40a3-94f0-8985c04bcbaf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404f34fc23977c4fbd704b63606be47b6607d02d850eb2cf09280abedf2afc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2
025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.206005 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3
b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.222537 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.227808 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.227852 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.227887 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.227927 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228042 4702 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228040 4702 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228093 4702 
nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.228078677 +0000 UTC m=+149.594674366 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228141 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228184 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.228131979 +0000 UTC m=+149.594727838 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228200 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228245 4702 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228328 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.228298644 +0000 UTC m=+149.594894483 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228366 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228481 4702 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228600 4702 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.228681 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.228644375 +0000 UTC m=+149.595240264 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.235832 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.252436 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"2025-11-25T10:32:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def\\\\n2025-11-25T10:32:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def to /host/opt/cni/bin/\\\\n2025-11-25T10:32:08Z [verbose] multus-daemon started\\\\n2025-11-25T10:32:08Z [verbose] Readiness Indicator file check\\\\n2025-11-25T10:32:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.268263 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.268318 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.268330 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.268349 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.268361 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.329883 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.330142 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.330103428 +0000 UTC m=+149.696699137 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.371827 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.371884 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.371893 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.371949 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.371961 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.401761 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.401822 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.401850 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.401989 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.402097 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.402210 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.475128 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.475196 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.475216 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.475242 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.475258 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.577613 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.577649 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.577657 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.577670 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.577680 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.680349 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.680391 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.680402 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.680418 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.680429 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.783435 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.783498 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.783516 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.783539 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.783559 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.886343 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.886399 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.886413 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.886432 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.886446 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.899109 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/3.log" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.904433 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:33:08 crc kubenswrapper[4702]: E1125 10:33:08.904731 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.922480 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.944245 4702 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294
e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.958785 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.975483 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.989137 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.989185 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.989201 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.989233 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.989250 4702 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:08Z","lastTransitionTime":"2025-11-25T10:33:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:08 crc kubenswrapper[4702]: I1125 10:33:08.990543 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:08Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.004991 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.024847 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4c7519258f9f6306dd1500e9e471e2711bd38d
91bd8e2f567dbe389736118c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:33:07Z\\\",\\\"message\\\":\\\"ster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1125 10:33:07.074064 6797 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nF1125 10:33:07.074070 6797 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for namespace Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook \\\\\\\"node.network-node-identity.o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:33:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller 
pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.036856 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":
[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.050568 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.064963 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 
10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.076957 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.092043 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.092074 4702 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.092084 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.092098 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.092107 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.095484 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernet
es/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"r
eason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.109616 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.124990 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.137171 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"101237fc-f099-40a3-94f0-8985c04bcbaf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404f34fc23977c4fbd704b63606be47b6607d02d850eb2cf09280abedf2afc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.152275 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.165409 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.178175 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.191658 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"2025-11-25T10:32:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def\\\\n2025-11-25T10:32:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def to /host/opt/cni/bin/\\\\n2025-11-25T10:32:08Z [verbose] multus-daemon started\\\\n2025-11-25T10:32:08Z [verbose] Readiness Indicator file check\\\\n2025-11-25T10:32:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:09Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.194439 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.194482 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.194491 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.194508 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.194518 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.297872 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.297947 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.297958 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.297977 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.297991 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.400332 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.400375 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.400383 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.400397 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.400408 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.401596 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:09 crc kubenswrapper[4702]: E1125 10:33:09.401742 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.503691 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.503762 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.503772 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.503786 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.503796 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.606612 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.606653 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.606665 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.606680 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.606691 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.708805 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.708873 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.708895 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.708967 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.708989 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.812305 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.812345 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.812355 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.812368 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.812376 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.914974 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.915022 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.915031 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.915046 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:09 crc kubenswrapper[4702]: I1125 10:33:09.915059 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:09Z","lastTransitionTime":"2025-11-25T10:33:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.017017 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.017070 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.017081 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.017096 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.017106 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.120635 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.120721 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.120739 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.120769 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.120787 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.223231 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.223287 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.223303 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.223325 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.223341 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.326342 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.326388 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.326400 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.326416 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.326431 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.402018 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.402091 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.402163 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:10 crc kubenswrapper[4702]: E1125 10:33:10.402236 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:10 crc kubenswrapper[4702]: E1125 10:33:10.402339 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:10 crc kubenswrapper[4702]: E1125 10:33:10.402440 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.430311 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.430380 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.430397 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.430425 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.430438 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.533636 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.534169 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.534184 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.534229 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.534239 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.636570 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.636620 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.636636 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.636668 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.636684 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.743810 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.743886 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.743944 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.743977 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.743999 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.846561 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.846619 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.846632 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.846650 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.846661 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.948891 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.948996 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.949018 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.949044 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:10 crc kubenswrapper[4702]: I1125 10:33:10.949063 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:10Z","lastTransitionTime":"2025-11-25T10:33:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.051387 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.051430 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.051441 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.051459 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.051482 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.154218 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.154276 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.154288 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.154307 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.154319 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.257106 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.257170 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.257182 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.257205 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.257218 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.360796 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.360868 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.360890 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.360980 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.361006 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.368579 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.368619 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.368627 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.368637 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.368645 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: E1125 10:33:11.386423 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.390807 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.390846 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.390854 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.390869 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.390878 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.402241 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:11 crc kubenswrapper[4702]: E1125 10:33:11.402778 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:11 crc kubenswrapper[4702]: E1125 10:33:11.406499 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.412202 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.412546 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.412796 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.413303 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.413653 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: E1125 10:33:11.429635 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.435108 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.435342 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.435447 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.436101 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.436150 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: E1125 10:33:11.448999 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.452821 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.452856 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.452867 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.452883 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.452933 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: E1125 10:33:11.473096 4702 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404560Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865360Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"lastTransitionTime\\\":\\\"2025-11-25T10:33:11Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"a04f18ec-6b5a-47c8-b0b5-77f700b576f7\\\",\\\"systemUUID\\\":\\\"163f1bb7-285f-4115-b335-3dabed78c4ea\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:11Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:11 crc kubenswrapper[4702]: E1125 10:33:11.473276 4702 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.475137 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
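Every retry above fails for the same reason: the node.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate whose NotAfter (2025-08-24T17:21:41Z) is months earlier than the node's clock (2025-11-25T10:33:11Z), so the kubelet's TLS handshake is rejected before the status patch is ever admitted. Below is a minimal Go sketch of the same validity check, useful for confirming the expiry from the node itself. The endpoint and dates come from the log; the file name and everything else are illustrative, not kubelet code.

// certcheck.go (illustrative name): dial a TLS endpoint and report the
// validity window of the certificate it presents, reproducing the
// "certificate has expired or is not yet valid" check seen in the log.
package main

import (
	"crypto/tls"
	"fmt"
	"os"
	"time"
)

func main() {
	addr := "127.0.0.1:9743" // webhook endpoint taken from the log
	// Skip chain verification: we want to inspect the certificate even
	// though verification would fail (that failure is the symptom above).
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		fmt.Fprintln(os.Stderr, "dial:", err)
		os.Exit(1)
	}
	defer conn.Close()

	now := time.Now()
	for _, cert := range conn.ConnectionState().PeerCertificates {
		fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
			cert.Subject,
			cert.NotBefore.Format(time.RFC3339),
			cert.NotAfter.Format(time.RFC3339))
		if now.After(cert.NotAfter) {
			fmt.Printf("  EXPIRED: current time %s is after %s\n",
				now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
		}
	}
}

Run with "go run certcheck.go" on the node; for the certificate above, the EXPIRED line would mirror the x509 error string in the log entries.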
event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.475175 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.475187 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.475204 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.475217 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.578283 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.578322 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.578331 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.578345 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.578356 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.681217 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.681298 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.681317 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.681337 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.681350 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.783680 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.783749 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.783759 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.783773 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.783783 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.886559 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.886601 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.886609 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.886624 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.886635 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.989465 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.989534 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.989559 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.989593 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:11 crc kubenswrapper[4702]: I1125 10:33:11.989616 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:11Z","lastTransitionTime":"2025-11-25T10:33:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.092665 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.092718 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.092735 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.092758 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.092774 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.196043 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.196093 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.196104 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.196121 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.196133 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.298629 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.298662 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.298670 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.298683 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.298693 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.400539 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.400593 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.400604 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.400624 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.400638 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.401728 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.401729 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:12 crc kubenswrapper[4702]: E1125 10:33:12.401855 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:12 crc kubenswrapper[4702]: E1125 10:33:12.401916 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.401738 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:12 crc kubenswrapper[4702]: E1125 10:33:12.401972 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.503104 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.503149 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.503164 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.503181 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.503194 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.606002 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.606051 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.606060 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.606077 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.606088 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.708953 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.709014 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.709034 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.709052 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.709066 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.812624 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.812707 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.812725 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.812751 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.812769 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.915325 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.915371 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.915383 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.915401 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:12 crc kubenswrapper[4702]: I1125 10:33:12.915412 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:12Z","lastTransitionTime":"2025-11-25T10:33:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.018709 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.018801 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.018831 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.018860 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.018879 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.121970 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.122106 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.122137 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.122167 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.122188 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.225611 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.225703 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.225734 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.225766 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.225787 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.329290 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.329374 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.329399 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.329429 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.329452 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.402480 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:13 crc kubenswrapper[4702]: E1125 10:33:13.403055 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.419027 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-pjw7q" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c862bae9-1615-46ec-a28d-889c38e69e81\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://151c0b901455fdc943c63a741d1323ed2cda8fcd613b22ae3029bbb544d75749\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-tsgrm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-pjw7q\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.432741 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.432817 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.432839 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.432869 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.432887 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.436295 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a8f7df1c9979a1db9879596f0f4c732ce8f68955bac1672c491edd2858b8eceb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-b5rwk\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-g5m5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to 
call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.458169 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c142b187-40eb-432e-8d5c-be984db819e3\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f797e8fb6c81389a4bf37bb39d7b086565397664424cd38c3f09dad12cd7cac8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7f13279ab3cea7c1bcc296cf385c082ef5479c16810bd5077202fc436abdd52e\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"sta
rtedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b2b334599956d88e8ead025f294e40301b52e544e23b385505e78126bad88de9\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.474866 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://59c196c66fa6c0cf607f463de122a7eea3ae9cffe243f5b7026f8df55c09f1e4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.490221 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was 
deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.505983 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://4f5559fa8c5556ca7420d3104006bd1e138f512f9c126f56c5cf1011c3e79553\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.520440 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.536024 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.536945 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.536959 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.536973 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.536982 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.538459 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:33:07Z\\\",\\\"message\\\":\\\"ster\\\\\\\", UUID:\\\\\\\"\\\\\\\", Protocol:\\\\\\\"TCP\\\\\\\", ExternalIDs:map[string]string{\\\\\\\"k8s.ovn.org/kind\\\\\\\":\\\\\\\"Service\\\\\\\", \\\\\\\"k8s.ovn.org/owner\\\\\\\":\\\\\\\"openshift-machine-api/control-plane-machine-set-operator\\\\\\\"}, Opts:services.LBOpts{Reject:true, EmptyLBEvents:false, AffinityTimeOut:0, SkipSNAT:false, Template:false, AddressFamily:\\\\\\\"\\\\\\\"}, Rules:[]services.LBRule{services.LBRule{Source:services.Addr{IP:\\\\\\\"10.217.4.41\\\\\\\", Port:9443, Template:(*services.Template)(nil)}, Targets:[]services.Addr{}}}, Templates:services.TemplateMap(nil), Switches:[]string{}, Routers:[]string{}, Groups:[]string{\\\\\\\"clusterLBGroup\\\\\\\"}}}\\\\nI1125 10:33:07.074064 6797 services_controller.go:452] Built service openshift-machine-api/control-plane-machine-set-operator per-node LB for network=default: []services.LB{}\\\\nF1125 10:33:07.074070 6797 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for namespace Informer during admin network policy controller initialization, handler {0x1fcbf20 0x1fcbc00 0x1fcbba0} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: failed calling webhook 
\\\\\\\"node.network-node-identity.o\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:33:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\
\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8mlzf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-h8hn4\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.548147 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qdjc6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ec475b07-7bf8-4c93-bfae-b60284870514\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:06Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://321b72c1afd3654c17bb49fa7f8d3767b3ec9acd5e5747202ee0809c896b71dd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:07Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wq549\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:06Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qdjc6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.559973 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"760a5ac7-2fa3-420b-ae5f-2739a89dd67c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:16Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:18Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://6460c84660821d6e247c70d49655a9a8975f7721acc75195ef9efeb2e2f8a0ac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://6ddfba69d473c4bf6b5a44b41d4ffa4b2f756c4da79f1b96a929d08db2c5d36f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hf7fv\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:16Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-shfm2\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 
10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.570102 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c8f0f344-2768-49e0-a344-81f5b457b671\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:17Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-gchhd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:17Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-fnlmg\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.588847 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"e9524f35-01cd-400a-aabc-d196d1203bcf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:47Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aa358228727f2d8e4a2f5684544464f64e394a795d5d0a622a7fbb996ea8c48c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b610d4f4aef25aab0968886da03d080d8670d4e1f385da1a66d171e92f390104\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://3550e39c5b38c6e2229883bfd2219da84fbce154ef39be199fd7b50938602046\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://36834fd6676d86db2945f801088d324627ae1a5
a64d5b35a556a0be1f0f5c044\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://f4216498f4d9909ba2dcf293a26f287c62205ec770f5c4e3980502845fd6307c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:47Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8330db6224b5a5eb4ba638a38f3fb054dacace3b038ee02bc83357adc1d96edd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://b21a423213ddccd3c99eafe1e9e2dfa3441ffe9cf68334c94141a2885ff6b868\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\"
,\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://501d4dfc9ebb9961c69b9bf86d42874383545a8d4d654eed2742bc535ecb484e\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.603181 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0f624474a8c727456e9128b96067659947f43bf3bfcd00bfe4d9495c49f6a070\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://3d0bb7eaef652f9c128bc9031085f319a1c406908c6648261d06b387d65a428e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.618711 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"89de2be3-64db-4383-951f-0758f58ffccb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:12Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:13Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c34683581ebcfbcdafc933b1274360404386bdc63774c7a8dfb334192c92ab5c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:12Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b16
2f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8885a30f38946586dd453d2c97b406426fffc482d1efe374f21503356c88c4c8\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:05Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a444e1e5928e80c6984167474e7354f1455b0838dcf8bc6ee51cd9d5e72d4f80\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1927289a7d4bed49cf950fcda82fc51611421e911a3860c8827df6824731feac\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:08Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:08Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountP
ath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://e3db247f013c7d2c2dae1dba8b63233fba5384d0fcc3f91b802cef80aec3ed85\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:09Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:09Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18ec32bb382cc0375f1cfdf8f0b2201a53e79ccb655de7c58fb2cb1761554f08\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:10Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f458424045\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b5d18e52cf2920d7d877baee9e8e8717aff5e9d37d0f3ce894829f4584
24045\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:32:11Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:11Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-wr6xh\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-v5gd5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.634208 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"101237fc-f099-40a3-94f0-8985c04bcbaf\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:46Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://404f34fc23977c4fbd704b63606be47b6607d02d850eb2cf09280abedf2afc97\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-crio\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kube\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\"
:0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://babf0b10692bcff328cd10c94c2f000190ccd436c84c610467fbd057782abda6\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var\\\",\\\"name\\\":\\\"var-lib-kubelet\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"kube-rbac-proxy-crio-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.640238 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.640308 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.640325 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.640436 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.640491 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.649481 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"06ceeb0a-a429-4102-a32d-1918c25ddc8c\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:37Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/ku
bernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"ed_certificates.go:53] \\\\\\\"Loaded SNI cert\\\\\\\" index=0 certName=\\\\\\\"self-signed loopback\\\\\\\" certDetail=\\\\\\\"\\\\\\\\\\\\\\\"apiserver-loopback-client@1764066724\\\\\\\\\\\\\\\" [serving] validServingFor=[apiserver-loopback-client] issuer=\\\\\\\\\\\\\\\"apiserver-loopback-client-ca@1764066724\\\\\\\\\\\\\\\" (2025-11-25 09:32:04 +0000 UTC to 2026-11-25 09:32:04 +0000 UTC (now=2025-11-25 10:32:04.505823988 +0000 UTC))\\\\\\\"\\\\nI1125 10:32:04.505861 1 secure_serving.go:213] Serving securely on [::]:17697\\\\nI1125 10:32:04.505882 1 genericapiserver.go:683] [graceful-termination] waiting for shutdown to be initiated\\\\nI1125 10:32:04.505918 1 requestheader_controller.go:172] Starting RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505943 1 shared_informer.go:313] Waiting for caches to sync for RequestHeaderAuthRequestController\\\\nI1125 10:32:04.505968 1 dynamic_serving_content.go:135] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1849767505/tls.crt::/tmp/serving-cert-1849767505/tls.key\\\\\\\"\\\\nI1125 10:32:04.506057 1 tlsconfig.go:243] \\\\\\\"Starting DynamicServingCertificateController\\\\\\\"\\\\nI1125 10:32:04.506150 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506159 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::client-ca-file\\\\nI1125 10:32:04.506170 1 configmap_cafile_content.go:205] \\\\\\\"Starting controller\\\\\\\" name=\\\\\\\"client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\\\\"\\\\nI1125 10:32:04.506175 1 shared_informer.go:313] Waiting for caches to sync for client-ca::kube-system::extension-apiserver-authentication::requestheader-client-ca-file\\\\nI1125 10:32:04.506229 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"WatchListClient\\\\\\\" enabled=false\\\\nI1125 10:32:04.506238 1 envvar.go:172] \\\\\\\"Feature gate default state\\\\\\\" feature=\\\\\\\"InformerResourceVersion\\\\\\\" enabled=false\\\\nF1125 10:32:04.506868 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:58Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:21Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.661813 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0f759082-fc9a-42c3-8b42-4a8a4fa0eccc\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:31Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:31:43Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://5b886cfcd626a3d92b6db991e626c13d949413a067d2140327268922c3e1c5a7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c601e66adbbebbab39f97f63695f6b2c886d6023d862352d7242dccbf921218d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:45Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://faff57eeb0d772e9c9d434a61c25bc5ee79778e14a0ec9edc1b3b6f7db65b43f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:31:46Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ac008a10da261df2e51f02d4d2f0e359dea3cbeb58927786c5a559eb9f3707dd\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2025-11-25T10:31:44Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2025-11-25T10:31:44Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:31:43Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.677461 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.693701 4702 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-dxlxj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"fc7bcda9-5809-4852-8dd7-414ead106d61\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:05Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:04Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2025-11-25T10:32:54Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2025-11-25T10:32:53Z\\\",\\\"message\\\":\\\"2025-11-25T10:32:08+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def\\\\n2025-11-25T10:32:08+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_86512b0d-d363-4291-8d82-66512b8d2def to /host/opt/cni/bin/\\\\n2025-11-25T10:32:08Z [verbose] multus-daemon started\\\\n2025-11-25T10:32:08Z [verbose] Readiness Indicator file check\\\\n2025-11-25T10:32:53Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2025-11-25T10:32:04Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2025-11-25T10:32:53Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-8dqmn\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2025-11-25T10:32:04Z\\\"}}\" for pod \"openshift-multus\"/\"multus-dxlxj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2025-11-25T10:33:13Z is after 2025-08-24T17:21:41Z" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.743330 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.743377 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.743386 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.743404 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.743415 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.846709 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.846812 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.846826 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.846845 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Nov 25 10:33:13 crc kubenswrapper[4702]: I1125 10:33:13.846878 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:13Z","lastTransitionTime":"2025-11-25T10:33:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
[The same five-entry block (NodeHasSufficientMemory, NodeHasNoDiskPressure, NodeHasSufficientPID, NodeNotReady, "Node became not ready") repeats verbatim, with only timestamps advancing, at 10:33:13.949275, 10:33:14.052820, 10:33:14.155630, 10:33:14.258679 and 10:33:14.362346.]
Nov 25 10:33:14 crc kubenswrapper[4702]: I1125 10:33:14.402065 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:33:14 crc kubenswrapper[4702]: I1125 10:33:14.402105 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:33:14 crc kubenswrapper[4702]: I1125 10:33:14.402119 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:33:14 crc kubenswrapper[4702]: E1125 10:33:14.402266 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:33:14 crc kubenswrapper[4702]: E1125 10:33:14.402369 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:33:14 crc kubenswrapper[4702]: E1125 10:33:14.402597 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[The five-entry node-status block repeats verbatim at 10:33:14.466091, 10:33:14.569222, 10:33:14.672655, 10:33:14.775919, 10:33:14.879346, 10:33:14.982128, 10:33:15.085223, 10:33:15.187875 and 10:33:15.290591.]
Nov 25 10:33:15 crc kubenswrapper[4702]: I1125 10:33:15.401347 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:33:15 crc kubenswrapper[4702]: E1125 10:33:15.402203 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671"
[The five-entry node-status block repeats verbatim at 10:33:15.401489 (interleaved with the two entries above), 10:33:15.505269, 10:33:15.608406, 10:33:15.711029, 10:33:15.813614, 10:33:15.916098, 10:33:16.019392, 10:33:16.122854, 10:33:16.225720 and 10:33:16.329180.]
Nov 25 10:33:16 crc kubenswrapper[4702]: I1125 10:33:16.401832 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:33:16 crc kubenswrapper[4702]: I1125 10:33:16.401888 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:33:16 crc kubenswrapper[4702]: I1125 10:33:16.402029 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:33:16 crc kubenswrapper[4702]: E1125 10:33:16.402082 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:33:16 crc kubenswrapper[4702]: E1125 10:33:16.403302 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:33:16 crc kubenswrapper[4702]: E1125 10:33:16.403458 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[The five-entry node-status block repeats verbatim at 10:33:16.432570, 10:33:16.535135, 10:33:16.638890, 10:33:16.741181, 10:33:16.845674, 10:33:16.950363, 10:33:17.054668, 10:33:17.157682, 10:33:17.261596 and 10:33:17.365510.]
Nov 25 10:33:17 crc kubenswrapper[4702]: I1125 10:33:17.401316 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:33:17 crc kubenswrapper[4702]: E1125 10:33:17.401533 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671"
[The five-entry node-status block repeats verbatim at 10:33:17.469439, 10:33:17.573107, 10:33:17.675974, 10:33:17.779269, 10:33:17.884287, 10:33:17.991263, 10:33:18.093367, 10:33:18.197106 and 10:33:18.301109.]
Nov 25 10:33:18 crc kubenswrapper[4702]: I1125 10:33:18.401796 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Nov 25 10:33:18 crc kubenswrapper[4702]: I1125 10:33:18.401870 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:33:18 crc kubenswrapper[4702]: I1125 10:33:18.401813 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Nov 25 10:33:18 crc kubenswrapper[4702]: E1125 10:33:18.402019 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Nov 25 10:33:18 crc kubenswrapper[4702]: E1125 10:33:18.402129 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Nov 25 10:33:18 crc kubenswrapper[4702]: E1125 10:33:18.402211 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[The five-entry node-status block repeats verbatim at 10:33:18.403237, 10:33:18.506986, 10:33:18.611066, 10:33:18.714215, 10:33:18.816401, 10:33:18.920018, 10:33:19.023171, 10:33:19.126415, 10:33:19.229204 and 10:33:19.332731.]
Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.401415 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg"
Nov 25 10:33:19 crc kubenswrapper[4702]: E1125 10:33:19.401571 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.436276 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.436344 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.436358 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.436380 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.436393 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:19Z","lastTransitionTime":"2025-11-25T10:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.540154 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.540673 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.541063 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.541470 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.541817 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:19Z","lastTransitionTime":"2025-11-25T10:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.647695 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.647773 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.647788 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.647807 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.647845 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:19Z","lastTransitionTime":"2025-11-25T10:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.751278 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.751349 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.751372 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.751403 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.751423 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:19Z","lastTransitionTime":"2025-11-25T10:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.854770 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.854851 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.854864 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.854886 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.854914 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:19Z","lastTransitionTime":"2025-11-25T10:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.957996 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.958039 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.958051 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.958070 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:19 crc kubenswrapper[4702]: I1125 10:33:19.958086 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:19Z","lastTransitionTime":"2025-11-25T10:33:19Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.061200 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.061251 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.061266 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.061283 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.061294 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.164176 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.164236 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.164250 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.164270 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.164288 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.267631 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.267704 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.267723 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.267804 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.267843 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.371307 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.371357 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.371370 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.371386 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.371400 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.401480 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:20 crc kubenswrapper[4702]: E1125 10:33:20.401679 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.401778 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.401937 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:20 crc kubenswrapper[4702]: E1125 10:33:20.402107 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:20 crc kubenswrapper[4702]: E1125 10:33:20.402233 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.475013 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.475062 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.475075 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.475096 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.475113 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.577568 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.577647 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.577667 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.577702 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.577722 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.681210 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.681253 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.681263 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.681278 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.681289 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.784453 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.784514 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.784522 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.784541 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.784551 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.887558 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.887602 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.887637 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.887655 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.887666 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.990040 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.990378 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.990475 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.990579 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:20 crc kubenswrapper[4702]: I1125 10:33:20.990662 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:20Z","lastTransitionTime":"2025-11-25T10:33:20Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.092741 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.092783 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.092794 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.092810 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.092820 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:21Z","lastTransitionTime":"2025-11-25T10:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.196251 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.196322 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.196349 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.196393 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.196642 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:21Z","lastTransitionTime":"2025-11-25T10:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.299328 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.299370 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.299379 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.299396 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.299408 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:21Z","lastTransitionTime":"2025-11-25T10:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.401891 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.401991 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.402015 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.402048 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.402072 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:21Z","lastTransitionTime":"2025-11-25T10:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.407777 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:21 crc kubenswrapper[4702]: E1125 10:33:21.407998 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.504978 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.505410 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.505565 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.505723 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.506048 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:21Z","lastTransitionTime":"2025-11-25T10:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.596873 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.597292 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.597440 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.597595 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.597761 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:21Z","lastTransitionTime":"2025-11-25T10:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.622212 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.622241 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.622252 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.622268 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.622280 4702 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2025-11-25T10:33:21Z","lastTransitionTime":"2025-11-25T10:33:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.675570 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:21 crc kubenswrapper[4702]: E1125 10:33:21.676400 4702 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:33:21 crc kubenswrapper[4702]: E1125 10:33:21.676532 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs podName:c8f0f344-2768-49e0-a344-81f5b457b671 nodeName:}" failed. No retries permitted until 2025-11-25 10:34:25.676496556 +0000 UTC m=+163.043092255 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs") pod "network-metrics-daemon-fnlmg" (UID: "c8f0f344-2768-49e0-a344-81f5b457b671") : object "openshift-multus"/"metrics-daemon-secret" not registered Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.677051 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k"] Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.677876 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.682777 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.683301 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.683494 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.683555 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.729216 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=77.729197495 podStartE2EDuration="1m17.729197495s" podCreationTimestamp="2025-11-25 10:32:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.729175004 +0000 UTC m=+99.095770723" watchObservedRunningTime="2025-11-25 10:33:21.729197495 +0000 UTC m=+99.095793194" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.783569 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.783619 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.783713 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.784038 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.784067 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-service-ca\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: 
\"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.805309 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-v5gd5" podStartSLOduration=78.80528373 podStartE2EDuration="1m18.80528373s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.775993987 +0000 UTC m=+99.142589696" watchObservedRunningTime="2025-11-25 10:33:21.80528373 +0000 UTC m=+99.171879419" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.828587 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=15.828562782 podStartE2EDuration="15.828562782s" podCreationTimestamp="2025-11-25 10:33:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.805552969 +0000 UTC m=+99.172148668" watchObservedRunningTime="2025-11-25 10:33:21.828562782 +0000 UTC m=+99.195158471" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.844424 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=76.844400967 podStartE2EDuration="1m16.844400967s" podCreationTimestamp="2025-11-25 10:32:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.828699006 +0000 UTC m=+99.195294695" watchObservedRunningTime="2025-11-25 10:33:21.844400967 +0000 UTC m=+99.210996666" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.844677 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=50.844671825 podStartE2EDuration="50.844671825s" podCreationTimestamp="2025-11-25 10:32:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.843336603 +0000 UTC m=+99.209932292" watchObservedRunningTime="2025-11-25 10:33:21.844671825 +0000 UTC m=+99.211267514" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.884648 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.884692 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.884712 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: 
\"kubernetes.io/host-path/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.884750 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.884771 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-service-ca\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.884930 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.885025 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.885530 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-service-ca\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.894603 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.902402 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-dxlxj" podStartSLOduration=78.902378305 podStartE2EDuration="1m18.902378305s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.874360552 +0000 UTC m=+99.240956261" watchObservedRunningTime="2025-11-25 10:33:21.902378305 +0000 UTC m=+99.268974004" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.910583 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/68d34ad7-2462-40a1-81a0-2dbf4122d2ff-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-n585k\" (UID: \"68d34ad7-2462-40a1-81a0-2dbf4122d2ff\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.936309 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/node-resolver-pjw7q" podStartSLOduration=78.936272565 podStartE2EDuration="1m18.936272565s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.923530319 +0000 UTC m=+99.290126008" watchObservedRunningTime="2025-11-25 10:33:21.936272565 +0000 UTC m=+99.302868254" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.936475 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podStartSLOduration=78.936411489 podStartE2EDuration="1m18.936411489s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.935964515 +0000 UTC m=+99.302560214" watchObservedRunningTime="2025-11-25 10:33:21.936411489 +0000 UTC m=+99.303007178" Nov 25 10:33:21 crc kubenswrapper[4702]: I1125 10:33:21.952209 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=72.952172032 podStartE2EDuration="1m12.952172032s" podCreationTimestamp="2025-11-25 10:32:09 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:21.95117222 +0000 UTC m=+99.317767919" watchObservedRunningTime="2025-11-25 10:33:21.952172032 +0000 UTC m=+99.318767721" Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.006444 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.016446 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-qdjc6" podStartSLOduration=79.01642902 podStartE2EDuration="1m19.01642902s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:22.015708427 +0000 UTC m=+99.382304126" watchObservedRunningTime="2025-11-25 10:33:22.01642902 +0000 UTC m=+99.383024709" Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.048495 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-shfm2" podStartSLOduration=79.048462951 podStartE2EDuration="1m19.048462951s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:22.035247059 +0000 UTC m=+99.401842748" watchObservedRunningTime="2025-11-25 10:33:22.048462951 +0000 UTC m=+99.415058640" Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.401515 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.401518 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:22 crc kubenswrapper[4702]: E1125 10:33:22.401825 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:22 crc kubenswrapper[4702]: E1125 10:33:22.401684 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.401518 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:22 crc kubenswrapper[4702]: E1125 10:33:22.401940 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.947448 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" event={"ID":"68d34ad7-2462-40a1-81a0-2dbf4122d2ff","Type":"ContainerStarted","Data":"89c4a58bb9e8d17d7e76ac711b4630f0c39153b6f8a8cbee988e6bd795964225"} Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.947510 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" event={"ID":"68d34ad7-2462-40a1-81a0-2dbf4122d2ff","Type":"ContainerStarted","Data":"8deb0de2dd300654af750ee8d974283f2a0c07c78efcc9cc386c8c0f02709b56"} Nov 25 10:33:22 crc kubenswrapper[4702]: I1125 10:33:22.968698 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-n585k" podStartSLOduration=79.968674529 podStartE2EDuration="1m19.968674529s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:22.967829462 +0000 UTC m=+100.334425231" watchObservedRunningTime="2025-11-25 10:33:22.968674529 +0000 UTC m=+100.335270228" Nov 25 10:33:23 crc kubenswrapper[4702]: I1125 10:33:23.401283 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:23 crc kubenswrapper[4702]: E1125 10:33:23.403824 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:23 crc kubenswrapper[4702]: I1125 10:33:23.404765 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:33:23 crc kubenswrapper[4702]: E1125 10:33:23.404954 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:33:24 crc kubenswrapper[4702]: I1125 10:33:24.402119 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:24 crc kubenswrapper[4702]: I1125 10:33:24.402119 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:24 crc kubenswrapper[4702]: E1125 10:33:24.402295 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:24 crc kubenswrapper[4702]: E1125 10:33:24.402320 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:24 crc kubenswrapper[4702]: I1125 10:33:24.402808 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:24 crc kubenswrapper[4702]: E1125 10:33:24.403048 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:25 crc kubenswrapper[4702]: I1125 10:33:25.402062 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:25 crc kubenswrapper[4702]: E1125 10:33:25.402272 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:26 crc kubenswrapper[4702]: I1125 10:33:26.401483 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:26 crc kubenswrapper[4702]: I1125 10:33:26.401489 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:26 crc kubenswrapper[4702]: I1125 10:33:26.401483 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:26 crc kubenswrapper[4702]: E1125 10:33:26.402269 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:26 crc kubenswrapper[4702]: E1125 10:33:26.402440 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:26 crc kubenswrapper[4702]: E1125 10:33:26.402575 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:27 crc kubenswrapper[4702]: I1125 10:33:27.401775 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:27 crc kubenswrapper[4702]: E1125 10:33:27.402428 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:28 crc kubenswrapper[4702]: I1125 10:33:28.401975 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:28 crc kubenswrapper[4702]: I1125 10:33:28.401990 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:28 crc kubenswrapper[4702]: I1125 10:33:28.402089 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:28 crc kubenswrapper[4702]: E1125 10:33:28.402705 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:28 crc kubenswrapper[4702]: E1125 10:33:28.402876 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:28 crc kubenswrapper[4702]: E1125 10:33:28.402553 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:29 crc kubenswrapper[4702]: I1125 10:33:29.401858 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:29 crc kubenswrapper[4702]: E1125 10:33:29.402040 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:30 crc kubenswrapper[4702]: I1125 10:33:30.402298 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:30 crc kubenswrapper[4702]: I1125 10:33:30.402333 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:30 crc kubenswrapper[4702]: I1125 10:33:30.402298 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:30 crc kubenswrapper[4702]: E1125 10:33:30.402431 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:30 crc kubenswrapper[4702]: E1125 10:33:30.402489 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:30 crc kubenswrapper[4702]: E1125 10:33:30.402550 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:31 crc kubenswrapper[4702]: I1125 10:33:31.401226 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:31 crc kubenswrapper[4702]: E1125 10:33:31.401392 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:32 crc kubenswrapper[4702]: I1125 10:33:32.402054 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:32 crc kubenswrapper[4702]: I1125 10:33:32.402095 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:32 crc kubenswrapper[4702]: I1125 10:33:32.402118 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:32 crc kubenswrapper[4702]: E1125 10:33:32.402216 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:32 crc kubenswrapper[4702]: E1125 10:33:32.402686 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:32 crc kubenswrapper[4702]: E1125 10:33:32.402848 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:33 crc kubenswrapper[4702]: I1125 10:33:33.402228 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:33 crc kubenswrapper[4702]: E1125 10:33:33.402507 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:34 crc kubenswrapper[4702]: I1125 10:33:34.401822 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:34 crc kubenswrapper[4702]: E1125 10:33:34.402058 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:34 crc kubenswrapper[4702]: I1125 10:33:34.402138 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:34 crc kubenswrapper[4702]: I1125 10:33:34.402170 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:34 crc kubenswrapper[4702]: E1125 10:33:34.402428 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:34 crc kubenswrapper[4702]: E1125 10:33:34.402548 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:35 crc kubenswrapper[4702]: I1125 10:33:35.401641 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:35 crc kubenswrapper[4702]: E1125 10:33:35.401788 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:35 crc kubenswrapper[4702]: I1125 10:33:35.402425 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:33:35 crc kubenswrapper[4702]: E1125 10:33:35.402604 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-h8hn4_openshift-ovn-kubernetes(a50f8b41-e2d8-4d32-9306-bdb2a753a4b6)\"" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" Nov 25 10:33:36 crc kubenswrapper[4702]: I1125 10:33:36.401110 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:36 crc kubenswrapper[4702]: E1125 10:33:36.401289 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:36 crc kubenswrapper[4702]: I1125 10:33:36.401375 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:36 crc kubenswrapper[4702]: I1125 10:33:36.401395 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:36 crc kubenswrapper[4702]: E1125 10:33:36.401599 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:36 crc kubenswrapper[4702]: E1125 10:33:36.401964 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:37 crc kubenswrapper[4702]: I1125 10:33:37.401739 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:37 crc kubenswrapper[4702]: E1125 10:33:37.401951 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:38 crc kubenswrapper[4702]: I1125 10:33:38.401933 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:38 crc kubenswrapper[4702]: I1125 10:33:38.401933 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:38 crc kubenswrapper[4702]: I1125 10:33:38.402058 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:38 crc kubenswrapper[4702]: E1125 10:33:38.402164 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:38 crc kubenswrapper[4702]: E1125 10:33:38.402286 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:38 crc kubenswrapper[4702]: E1125 10:33:38.402350 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:39 crc kubenswrapper[4702]: I1125 10:33:39.402000 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:39 crc kubenswrapper[4702]: E1125 10:33:39.402501 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.007353 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/1.log" Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.008164 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/0.log" Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.008237 4702 generic.go:334] "Generic (PLEG): container finished" podID="fc7bcda9-5809-4852-8dd7-414ead106d61" containerID="9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf" exitCode=1 Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.008281 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dxlxj" event={"ID":"fc7bcda9-5809-4852-8dd7-414ead106d61","Type":"ContainerDied","Data":"9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf"} Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.008332 4702 scope.go:117] "RemoveContainer" containerID="9c9c7db3185d0cee663de9472cd5c6d127904b00519effc2d4ede76b719c3a94" Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.008810 4702 scope.go:117] "RemoveContainer" containerID="9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf" Nov 25 10:33:40 crc kubenswrapper[4702]: E1125 10:33:40.009047 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-dxlxj_openshift-multus(fc7bcda9-5809-4852-8dd7-414ead106d61)\"" pod="openshift-multus/multus-dxlxj" podUID="fc7bcda9-5809-4852-8dd7-414ead106d61" Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.401793 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.401834 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:40 crc kubenswrapper[4702]: E1125 10:33:40.402412 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:40 crc kubenswrapper[4702]: I1125 10:33:40.402565 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:40 crc kubenswrapper[4702]: E1125 10:33:40.402937 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:40 crc kubenswrapper[4702]: E1125 10:33:40.402485 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:41 crc kubenswrapper[4702]: I1125 10:33:41.014124 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/1.log" Nov 25 10:33:41 crc kubenswrapper[4702]: I1125 10:33:41.402129 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:41 crc kubenswrapper[4702]: E1125 10:33:41.402796 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:42 crc kubenswrapper[4702]: I1125 10:33:42.401518 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:42 crc kubenswrapper[4702]: I1125 10:33:42.401667 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:42 crc kubenswrapper[4702]: E1125 10:33:42.401690 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:42 crc kubenswrapper[4702]: I1125 10:33:42.401779 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:42 crc kubenswrapper[4702]: E1125 10:33:42.402243 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:42 crc kubenswrapper[4702]: E1125 10:33:42.402360 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:43 crc kubenswrapper[4702]: E1125 10:33:43.340116 4702 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Nov 25 10:33:43 crc kubenswrapper[4702]: I1125 10:33:43.402552 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:43 crc kubenswrapper[4702]: E1125 10:33:43.403091 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:43 crc kubenswrapper[4702]: E1125 10:33:43.541817 4702 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 25 10:33:44 crc kubenswrapper[4702]: I1125 10:33:44.401457 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:44 crc kubenswrapper[4702]: I1125 10:33:44.401511 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:44 crc kubenswrapper[4702]: E1125 10:33:44.401603 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:44 crc kubenswrapper[4702]: I1125 10:33:44.401457 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:44 crc kubenswrapper[4702]: E1125 10:33:44.401708 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:44 crc kubenswrapper[4702]: E1125 10:33:44.401998 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:45 crc kubenswrapper[4702]: I1125 10:33:45.401709 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:45 crc kubenswrapper[4702]: E1125 10:33:45.402012 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:46 crc kubenswrapper[4702]: I1125 10:33:46.401728 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:46 crc kubenswrapper[4702]: I1125 10:33:46.402020 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:46 crc kubenswrapper[4702]: I1125 10:33:46.401969 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:46 crc kubenswrapper[4702]: E1125 10:33:46.402118 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:46 crc kubenswrapper[4702]: E1125 10:33:46.402198 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:46 crc kubenswrapper[4702]: E1125 10:33:46.402257 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:47 crc kubenswrapper[4702]: I1125 10:33:47.401393 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:47 crc kubenswrapper[4702]: E1125 10:33:47.401563 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:47 crc kubenswrapper[4702]: I1125 10:33:47.402273 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:33:48 crc kubenswrapper[4702]: I1125 10:33:48.050426 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/3.log" Nov 25 10:33:48 crc kubenswrapper[4702]: I1125 10:33:48.060001 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerStarted","Data":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} Nov 25 10:33:48 crc kubenswrapper[4702]: I1125 10:33:48.092090 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podStartSLOduration=105.092061542 podStartE2EDuration="1m45.092061542s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:33:48.091580466 +0000 UTC m=+125.458176175" watchObservedRunningTime="2025-11-25 10:33:48.092061542 +0000 UTC m=+125.458657231" Nov 25 10:33:48 crc kubenswrapper[4702]: I1125 10:33:48.352277 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-fnlmg"] Nov 25 10:33:48 crc kubenswrapper[4702]: I1125 10:33:48.352404 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:48 crc kubenswrapper[4702]: E1125 10:33:48.352493 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:48 crc kubenswrapper[4702]: I1125 10:33:48.401725 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:48 crc kubenswrapper[4702]: I1125 10:33:48.401782 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:48 crc kubenswrapper[4702]: I1125 10:33:48.401749 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:48 crc kubenswrapper[4702]: E1125 10:33:48.401940 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:48 crc kubenswrapper[4702]: E1125 10:33:48.402059 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:48 crc kubenswrapper[4702]: E1125 10:33:48.402203 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:48 crc kubenswrapper[4702]: E1125 10:33:48.543789 4702 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 25 10:33:50 crc kubenswrapper[4702]: I1125 10:33:50.401219 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:50 crc kubenswrapper[4702]: I1125 10:33:50.401276 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:50 crc kubenswrapper[4702]: I1125 10:33:50.401282 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:50 crc kubenswrapper[4702]: I1125 10:33:50.401219 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:50 crc kubenswrapper[4702]: E1125 10:33:50.401395 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:50 crc kubenswrapper[4702]: E1125 10:33:50.401428 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:50 crc kubenswrapper[4702]: E1125 10:33:50.401482 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:50 crc kubenswrapper[4702]: E1125 10:33:50.401583 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:52 crc kubenswrapper[4702]: I1125 10:33:52.401961 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:52 crc kubenswrapper[4702]: I1125 10:33:52.401961 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:52 crc kubenswrapper[4702]: I1125 10:33:52.402192 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:52 crc kubenswrapper[4702]: E1125 10:33:52.402101 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:52 crc kubenswrapper[4702]: I1125 10:33:52.401984 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:52 crc kubenswrapper[4702]: E1125 10:33:52.402288 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:52 crc kubenswrapper[4702]: E1125 10:33:52.402369 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:52 crc kubenswrapper[4702]: E1125 10:33:52.402689 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:53 crc kubenswrapper[4702]: E1125 10:33:53.544585 4702 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Nov 25 10:33:54 crc kubenswrapper[4702]: I1125 10:33:54.402033 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:54 crc kubenswrapper[4702]: E1125 10:33:54.402477 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:54 crc kubenswrapper[4702]: I1125 10:33:54.402120 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:54 crc kubenswrapper[4702]: E1125 10:33:54.402704 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:54 crc kubenswrapper[4702]: I1125 10:33:54.402054 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:54 crc kubenswrapper[4702]: E1125 10:33:54.402931 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:54 crc kubenswrapper[4702]: I1125 10:33:54.402156 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:54 crc kubenswrapper[4702]: E1125 10:33:54.403150 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:55 crc kubenswrapper[4702]: I1125 10:33:55.183628 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:33:55 crc kubenswrapper[4702]: I1125 10:33:55.202191 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:33:55 crc kubenswrapper[4702]: I1125 10:33:55.402608 4702 scope.go:117] "RemoveContainer" containerID="9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf" Nov 25 10:33:56 crc kubenswrapper[4702]: I1125 10:33:56.089566 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/1.log" Nov 25 10:33:56 crc kubenswrapper[4702]: I1125 10:33:56.089675 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dxlxj" event={"ID":"fc7bcda9-5809-4852-8dd7-414ead106d61","Type":"ContainerStarted","Data":"556a391af94990867eec33ada4cd8f20bcf026bb13b614eccc44d39b2b055d1f"} Nov 25 10:33:56 crc kubenswrapper[4702]: I1125 10:33:56.402207 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:56 crc kubenswrapper[4702]: I1125 10:33:56.402263 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:56 crc kubenswrapper[4702]: I1125 10:33:56.402341 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:56 crc kubenswrapper[4702]: E1125 10:33:56.402427 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:56 crc kubenswrapper[4702]: I1125 10:33:56.402462 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:56 crc kubenswrapper[4702]: E1125 10:33:56.402650 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:56 crc kubenswrapper[4702]: E1125 10:33:56.402782 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:33:56 crc kubenswrapper[4702]: E1125 10:33:56.402980 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:58 crc kubenswrapper[4702]: I1125 10:33:58.401980 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:33:58 crc kubenswrapper[4702]: I1125 10:33:58.402070 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:33:58 crc kubenswrapper[4702]: E1125 10:33:58.402139 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-fnlmg" podUID="c8f0f344-2768-49e0-a344-81f5b457b671" Nov 25 10:33:58 crc kubenswrapper[4702]: I1125 10:33:58.402226 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:33:58 crc kubenswrapper[4702]: E1125 10:33:58.402373 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Nov 25 10:33:58 crc kubenswrapper[4702]: E1125 10:33:58.402529 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Nov 25 10:33:58 crc kubenswrapper[4702]: I1125 10:33:58.403562 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:33:58 crc kubenswrapper[4702]: E1125 10:33:58.403851 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.402165 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.403056 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.403241 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.403344 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.405359 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.405368 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.406288 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.409135 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.409386 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Nov 25 10:34:00 crc kubenswrapper[4702]: I1125 10:34:00.409536 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.249729 4702 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.296290 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-fh9fl"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.297117 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.301684 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kfhfz"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.302631 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.302730 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.302976 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.303225 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.303487 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.304676 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.305495 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.308491 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.310729 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.310942 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.310993 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.311092 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.311293 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.311319 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.311880 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.312712 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b2ph2"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.313334 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.313548 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.312800 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.314053 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.314116 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.312992 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.315563 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vnwhc"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.316338 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.317546 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7s7k2"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.318517 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.320483 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.320601 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.320656 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.321697 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-sfbdd"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.323455 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.324118 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.325187 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.326838 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.327880 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.328241 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.328397 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.328541 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.328688 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.329851 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.330332 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.330690 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.331021 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.331293 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.331451 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.331531 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.331460 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.332051 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.332215 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.332573 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.336619 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.336930 4702 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.336936 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.337642 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.337782 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.343120 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-zh82l"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.343821 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.344421 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.345979 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.346034 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.346235 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.346272 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.360083 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.360300 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.360858 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.363810 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.366242 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.364252 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.364320 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.381327 4702 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"trusted-ca-bundle" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.382170 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.382371 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.382512 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.382693 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.382987 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.383473 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.383593 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.384126 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.384272 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.386730 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.387050 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.387347 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.387518 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.387680 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.388206 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.388307 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.388433 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.388539 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.389783 4702 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.390178 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.390238 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.390432 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.392352 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.392704 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.393532 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394247 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-trusted-ca-bundle\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394282 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-config\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394306 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-audit\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394327 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-etcd-serving-ca\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394350 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-trusted-ca\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394369 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-proxy-ca-bundles\") pod 
\"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394391 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1683b7f6-ecde-4865-ace5-0d570070451d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6fn9w\" (UID: \"1683b7f6-ecde-4865-ace5-0d570070451d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394414 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-etcd-client\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394436 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-serving-cert\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394470 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/738f53e3-118f-44e8-9b32-baeff939bef4-audit-dir\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394492 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-serving-cert\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394513 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-audit-policies\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394533 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394554 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dv94c\" (UniqueName: \"kubernetes.io/projected/8e260cd3-afc7-4f82-90de-e3cd459cc79a-kube-api-access-dv94c\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394577 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-config\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394602 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-z8pmz\" (UniqueName: \"kubernetes.io/projected/1683b7f6-ecde-4865-ace5-0d570070451d-kube-api-access-z8pmz\") pod \"cluster-samples-operator-665b6dd947-6fn9w\" (UID: \"1683b7f6-ecde-4865-ace5-0d570070451d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394625 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-etcd-client\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394647 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-image-import-ca\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394678 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-encryption-config\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394701 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-client-ca\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394740 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394761 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-encryption-config\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394782 4702 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gnlfz\" (UniqueName: \"kubernetes.io/projected/738f53e3-118f-44e8-9b32-baeff939bef4-kube-api-access-gnlfz\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394805 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/738f53e3-118f-44e8-9b32-baeff939bef4-node-pullsecrets\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394824 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-config\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394843 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbkpc\" (UniqueName: \"kubernetes.io/projected/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-kube-api-access-fbkpc\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394862 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-serving-cert\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394893 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39e51e54-1814-4e9d-a6e0-42657e63a2c5-serving-cert\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.394932 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6r9w4\" (UniqueName: \"kubernetes.io/projected/39e51e54-1814-4e9d-a6e0-42657e63a2c5-kube-api-access-6r9w4\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.395402 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-2r4cg"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.395470 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8e260cd3-afc7-4f82-90de-e3cd459cc79a-audit-dir\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 
10:34:02.395745 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-w2kdw"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.396108 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.396137 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.400764 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.402745 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.405380 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.407225 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.408582 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.409194 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.413341 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.414537 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8kd6d"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.415056 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.415513 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.415710 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.415881 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.416267 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.416489 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.416749 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.417890 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.418250 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.418366 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.418564 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.418665 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.418764 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.418864 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.418983 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.419076 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.419166 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.420735 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.421357 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.421387 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.422421 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.422729 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.425167 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.439880 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.440212 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.440316 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.440523 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.440722 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.440230 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.441023 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.458542 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-45qgf"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.459258 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.459481 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.459705 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.459966 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.460201 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.460662 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.461265 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.461396 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.461683 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.461959 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.464254 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.465495 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.466233 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.467365 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.467580 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.468233 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.469260 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.469806 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.473700 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.474509 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.475099 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6ntqc"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.475495 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.476394 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d5g4l"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.476817 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.477791 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.478662 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.479051 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.480503 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.480833 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zbgbq"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.480886 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.481368 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.482082 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-t7mgq"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.482534 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.484320 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.485640 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.485758 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.486035 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vnwhc"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.487033 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-sfbdd"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.498715 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519141 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519805 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/af02ba00-7c73-4bc3-a341-5dac59a49e12-images\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519848 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-config\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519875 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-audit\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519914 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-etcd-serving-ca\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519936 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-trusted-ca\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519955 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519976 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: 
\"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.519998 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76bd594-1b9f-4aff-bde0-390b917fbf5e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520017 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1683b7f6-ecde-4865-ace5-0d570070451d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6fn9w\" (UID: \"1683b7f6-ecde-4865-ace5-0d570070451d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520033 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-etcd-client\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520047 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-serving-cert\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520063 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520087 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-trusted-ca-bundle\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520107 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-audit-policies\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520135 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lmzcq\" (UniqueName: \"kubernetes.io/projected/b76bd594-1b9f-4aff-bde0-390b917fbf5e-kube-api-access-lmzcq\") pod 
\"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520151 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvdp7\" (UniqueName: \"kubernetes.io/projected/af02ba00-7c73-4bc3-a341-5dac59a49e12-kube-api-access-xvdp7\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520169 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8101d279-dc32-48e9-80c8-52ea60394ca3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520189 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/738f53e3-118f-44e8-9b32-baeff939bef4-audit-dir\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520206 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6264f924-12b0-4afd-a7c3-ad6428ebfc79-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520224 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-serving-cert\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520242 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-audit-policies\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520260 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lk587\" (UniqueName: \"kubernetes.io/projected/de0d09a1-663f-4a61-a609-b74ee9eb887e-kube-api-access-lk587\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520278 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-console-config\") pod 
\"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520297 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9bsfm\" (UniqueName: \"kubernetes.io/projected/8101d279-dc32-48e9-80c8-52ea60394ca3-kube-api-access-9bsfm\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520316 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dv94c\" (UniqueName: \"kubernetes.io/projected/8e260cd3-afc7-4f82-90de-e3cd459cc79a-kube-api-access-dv94c\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520335 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/de0d09a1-663f-4a61-a609-b74ee9eb887e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520353 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8101d279-dc32-48e9-80c8-52ea60394ca3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520371 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520388 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-config\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520406 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520424 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af02ba00-7c73-4bc3-a341-5dac59a49e12-config\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: 
\"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520443 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zx4q\" (UniqueName: \"kubernetes.io/projected/4468bda1-5b59-48d8-836b-87faa7f35726-kube-api-access-8zx4q\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520461 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-z8pmz\" (UniqueName: \"kubernetes.io/projected/1683b7f6-ecde-4865-ace5-0d570070451d-kube-api-access-z8pmz\") pod \"cluster-samples-operator-665b6dd947-6fn9w\" (UID: \"1683b7f6-ecde-4865-ace5-0d570070451d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520479 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-etcd-client\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520495 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9923db54-633b-4725-87f8-384fa9feac18-console-oauth-config\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520512 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5chxv\" (UniqueName: \"kubernetes.io/projected/9923db54-633b-4725-87f8-384fa9feac18-kube-api-access-5chxv\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520533 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-image-import-ca\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520550 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zcp4k\" (UniqueName: \"kubernetes.io/projected/a8621fa2-6cb1-4e0e-b1ed-3f254430262b-kube-api-access-zcp4k\") pod \"downloads-7954f5f757-2r4cg\" (UID: \"a8621fa2-6cb1-4e0e-b1ed-3f254430262b\") " pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520578 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 
25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520595 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hr4cq\" (UniqueName: \"kubernetes.io/projected/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-kube-api-access-hr4cq\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520611 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520628 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520648 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-encryption-config\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520664 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-client-ca\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520684 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b22715b1-a39f-4f63-a05d-4f49ce20b654-metrics-tls\") pod \"dns-operator-744455d44c-sfbdd\" (UID: \"b22715b1-a39f-4f63-a05d-4f49ce20b654\") " pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520702 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-oauth-serving-cert\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520726 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b76bd594-1b9f-4aff-bde0-390b917fbf5e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 
10:34:02.520749 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9923db54-633b-4725-87f8-384fa9feac18-console-serving-cert\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520767 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-config\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520783 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8101d279-dc32-48e9-80c8-52ea60394ca3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520799 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/266ce950-00f4-440d-9196-6a4ab41404ea-audit-dir\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520818 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-encryption-config\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520835 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520853 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520871 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520888 4702 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520936 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gnlfz\" (UniqueName: \"kubernetes.io/projected/738f53e3-118f-44e8-9b32-baeff939bef4-kube-api-access-gnlfz\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520954 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kdhcn\" (UniqueName: \"kubernetes.io/projected/6264f924-12b0-4afd-a7c3-ad6428ebfc79-kube-api-access-kdhcn\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520972 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/738f53e3-118f-44e8-9b32-baeff939bef4-node-pullsecrets\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.520987 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-config\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521004 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521021 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/af02ba00-7c73-4bc3-a341-5dac59a49e12-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521039 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6264f924-12b0-4afd-a7c3-ad6428ebfc79-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521069 4702 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-fbkpc\" (UniqueName: \"kubernetes.io/projected/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-kube-api-access-fbkpc\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521086 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-service-ca-bundle\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521103 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-serving-cert\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521119 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4468bda1-5b59-48d8-836b-87faa7f35726-serving-cert\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521144 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521163 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-service-ca\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521180 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39e51e54-1814-4e9d-a6e0-42657e63a2c5-serving-cert\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521199 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6r9w4\" (UniqueName: \"kubernetes.io/projected/39e51e54-1814-4e9d-a6e0-42657e63a2c5-kube-api-access-6r9w4\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521215 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vth9b\" (UniqueName: 
\"kubernetes.io/projected/b22715b1-a39f-4f63-a05d-4f49ce20b654-kube-api-access-vth9b\") pod \"dns-operator-744455d44c-sfbdd\" (UID: \"b22715b1-a39f-4f63-a05d-4f49ce20b654\") " pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521234 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521250 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521268 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8e260cd3-afc7-4f82-90de-e3cd459cc79a-audit-dir\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521286 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521306 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5jj9\" (UniqueName: \"kubernetes.io/projected/266ce950-00f4-440d-9196-6a4ab41404ea-kube-api-access-n5jj9\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521324 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-trusted-ca-bundle\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521342 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de0d09a1-663f-4a61-a609-b74ee9eb887e-serving-cert\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521873 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-config\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.521992 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.522348 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-audit\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.522611 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-config\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.522744 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-etcd-serving-ca\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.523682 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-trusted-ca\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.524705 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-image-import-ca\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.525092 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/738f53e3-118f-44e8-9b32-baeff939bef4-audit-dir\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.525134 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/738f53e3-118f-44e8-9b32-baeff939bef4-node-pullsecrets\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.525522 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: 
\"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-config\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.528026 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-etcd-client\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.528628 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-encryption-config\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.528933 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-serving-cert\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.528962 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/1683b7f6-ecde-4865-ace5-0d570070451d-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-6fn9w\" (UID: \"1683b7f6-ecde-4865-ace5-0d570070451d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.528962 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-audit-policies\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.529079 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/8e260cd3-afc7-4f82-90de-e3cd459cc79a-audit-dir\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.529114 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.529708 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.530285 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-encryption-config\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.530330 4702 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.530404 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.530408 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.532114 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-client-ca\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.532086 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.532656 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/738f53e3-118f-44e8-9b32-baeff939bef4-serving-cert\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.533582 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b2ph2"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.533610 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-fh9fl"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.533682 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.533722 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/738f53e3-118f-44e8-9b32-baeff939bef4-trusted-ca-bundle\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.534693 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kfhfz"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.535093 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39e51e54-1814-4e9d-a6e0-42657e63a2c5-serving-cert\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.535146 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-s8rd8"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.536555 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/8e260cd3-afc7-4f82-90de-e3cd459cc79a-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.536648 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-serving-cert\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.537107 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.537436 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2r4cg"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.538875 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.542665 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.546096 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.546795 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.547756 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7s7k2"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.548691 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.549667 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-w2kdw"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.550610 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8kd6d"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.550955 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/8e260cd3-afc7-4f82-90de-e3cd459cc79a-etcd-client\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.552581 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.553591 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.554601 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-zh82l"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.555653 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.556752 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-45qgf"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.558523 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.559620 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp"] Nov 25 10:34:02 crc 
kubenswrapper[4702]: I1125 10:34:02.559893 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.560674 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d5g4l"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.561718 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.562867 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.563879 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.564957 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-74xrt"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.566089 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-gn6ks"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.566268 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.566463 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.566928 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.568042 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.569067 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.570617 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.571584 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6ntqc"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.572814 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.574042 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-s8rd8"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.575171 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.576190 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.577246 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-image-registry/image-registry-697d97f7c8-zbgbq"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.578518 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.578881 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.580615 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-74xrt"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.581828 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.582846 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-6qwtj"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.583508 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-6qwtj" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.584034 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-6qwtj"] Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.599387 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.620150 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.621967 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622005 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622035 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9vrff\" (UniqueName: \"kubernetes.io/projected/1cae19df-e9ee-4a4c-b00e-814682583af1-kube-api-access-9vrff\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622062 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622088 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-signing-key\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622129 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kdhcn\" (UniqueName: \"kubernetes.io/projected/6264f924-12b0-4afd-a7c3-ad6428ebfc79-kube-api-access-kdhcn\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622152 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8ad397b-3500-4791-b005-c6cc5d83e8b1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: \"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622174 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622197 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/af02ba00-7c73-4bc3-a341-5dac59a49e12-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622221 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6264f924-12b0-4afd-a7c3-ad6428ebfc79-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622246 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4mhf\" (UniqueName: \"kubernetes.io/projected/0fcf11f0-0a26-4ddd-a603-70ace7390469-kube-api-access-m4mhf\") pod \"multus-admission-controller-857f4d67dd-45qgf\" (UID: \"0fcf11f0-0a26-4ddd-a603-70ace7390469\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622271 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pl8nz\" (UniqueName: \"kubernetes.io/projected/1c0109c0-09bf-407d-b336-e3ff9f6ecea6-kube-api-access-pl8nz\") pod 
\"package-server-manager-789f6589d5-bqhvf\" (UID: \"1c0109c0-09bf-407d-b336-e3ff9f6ecea6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622293 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-config\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622325 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-service-ca-bundle\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622345 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cae19df-e9ee-4a4c-b00e-814682583af1-config\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622369 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4468bda1-5b59-48d8-836b-87faa7f35726-serving-cert\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622389 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-client-ca\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622422 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622444 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-service-ca\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622467 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/097c4dd5-c85f-447c-9448-5969f491f49d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: 
\"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622550 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0109c0-09bf-407d-b336-e3ff9f6ecea6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-bqhvf\" (UID: \"1c0109c0-09bf-407d-b336-e3ff9f6ecea6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622586 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2mbc9\" (UniqueName: \"kubernetes.io/projected/097c4dd5-c85f-447c-9448-5969f491f49d-kube-api-access-2mbc9\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: \"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622610 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vth9b\" (UniqueName: \"kubernetes.io/projected/b22715b1-a39f-4f63-a05d-4f49ce20b654-kube-api-access-vth9b\") pod \"dns-operator-744455d44c-sfbdd\" (UID: \"b22715b1-a39f-4f63-a05d-4f49ce20b654\") " pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622642 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622662 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9402a523-ed8c-499b-bac9-0a0d6598ef52-serving-cert\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622684 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7vxs\" (UniqueName: \"kubernetes.io/projected/9402a523-ed8c-499b-bac9-0a0d6598ef52-kube-api-access-h7vxs\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622709 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622730 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tp5x6\" (UniqueName: 
\"kubernetes.io/projected/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-kube-api-access-tp5x6\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622755 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622783 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de0d09a1-663f-4a61-a609-b74ee9eb887e-serving-cert\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622806 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5jj9\" (UniqueName: \"kubernetes.io/projected/266ce950-00f4-440d-9196-6a4ab41404ea-kube-api-access-n5jj9\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622829 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pwg8\" (UniqueName: \"kubernetes.io/projected/5d45eacc-c698-4f1e-bccd-05d88696c983-kube-api-access-5pwg8\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622856 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/af02ba00-7c73-4bc3-a341-5dac59a49e12-images\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622879 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622921 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622948 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76bd594-1b9f-4aff-bde0-390b917fbf5e-config\") pod 
\"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622973 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-trusted-ca-bundle\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.622998 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8ad397b-3500-4791-b005-c6cc5d83e8b1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: \"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.623149 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6264f924-12b0-4afd-a7c3-ad6428ebfc79-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.624105 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/af02ba00-7c73-4bc3-a341-5dac59a49e12-images\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.624298 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-trusted-ca-bundle\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.624564 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b76bd594-1b9f-4aff-bde0-390b917fbf5e-config\") pod \"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.623205 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-audit-policies\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.625163 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lmzcq\" (UniqueName: \"kubernetes.io/projected/b76bd594-1b9f-4aff-bde0-390b917fbf5e-kube-api-access-lmzcq\") pod \"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " 
pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.625095 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.625285 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8101d279-dc32-48e9-80c8-52ea60394ca3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.625320 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0fcf11f0-0a26-4ddd-a603-70ace7390469-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-45qgf\" (UID: \"0fcf11f0-0a26-4ddd-a603-70ace7390469\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.625344 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-signing-cabundle\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.625376 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvdp7\" (UniqueName: \"kubernetes.io/projected/af02ba00-7c73-4bc3-a341-5dac59a49e12-kube-api-access-xvdp7\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.625459 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.626124 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-service-ca\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.626246 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6264f924-12b0-4afd-a7c3-ad6428ebfc79-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " 
pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.626666 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-audit-policies\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.626789 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.626915 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5d45eacc-c698-4f1e-bccd-05d88696c983-srv-cert\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627002 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xhgtr\" (UniqueName: \"kubernetes.io/projected/8138f6f9-e4df-436f-9b58-b9b3f3e80b26-kube-api-access-xhgtr\") pod \"migrator-59844c95c7-5klvj\" (UID: \"8138f6f9-e4df-436f-9b58-b9b3f3e80b26\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627082 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lk587\" (UniqueName: \"kubernetes.io/projected/de0d09a1-663f-4a61-a609-b74ee9eb887e-kube-api-access-lk587\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.626941 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627206 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627292 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-console-config\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc 
kubenswrapper[4702]: I1125 10:34:02.627367 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9bsfm\" (UniqueName: \"kubernetes.io/projected/8101d279-dc32-48e9-80c8-52ea60394ca3-kube-api-access-9bsfm\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627439 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5d45eacc-c698-4f1e-bccd-05d88696c983-profile-collector-cert\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627514 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/de0d09a1-663f-4a61-a609-b74ee9eb887e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627642 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8101d279-dc32-48e9-80c8-52ea60394ca3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627316 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/de0d09a1-663f-4a61-a609-b74ee9eb887e-serving-cert\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627876 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.627954 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-service-ca-bundle\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.628028 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af02ba00-7c73-4bc3-a341-5dac59a49e12-config\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.628246 4702 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-console-config\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.628442 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/de0d09a1-663f-4a61-a609-b74ee9eb887e-available-featuregates\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629178 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/af02ba00-7c73-4bc3-a341-5dac59a49e12-config\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629242 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zx4q\" (UniqueName: \"kubernetes.io/projected/4468bda1-5b59-48d8-836b-87faa7f35726-kube-api-access-8zx4q\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629293 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629302 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/8101d279-dc32-48e9-80c8-52ea60394ca3-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629333 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/9923db54-633b-4725-87f8-384fa9feac18-console-oauth-config\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629403 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5chxv\" (UniqueName: \"kubernetes.io/projected/9923db54-633b-4725-87f8-384fa9feac18-kube-api-access-5chxv\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629507 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zcp4k\" (UniqueName: 
\"kubernetes.io/projected/a8621fa2-6cb1-4e0e-b1ed-3f254430262b-kube-api-access-zcp4k\") pod \"downloads-7954f5f757-2r4cg\" (UID: \"a8621fa2-6cb1-4e0e-b1ed-3f254430262b\") " pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629544 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629577 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hr4cq\" (UniqueName: \"kubernetes.io/projected/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-kube-api-access-hr4cq\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629597 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8ad397b-3500-4791-b005-c6cc5d83e8b1-config\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: \"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629615 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629639 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629666 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-oauth-serving-cert\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629701 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b76bd594-1b9f-4aff-bde0-390b917fbf5e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629721 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: 
\"kubernetes.io/secret/b22715b1-a39f-4f63-a05d-4f49ce20b654-metrics-tls\") pod \"dns-operator-744455d44c-sfbdd\" (UID: \"b22715b1-a39f-4f63-a05d-4f49ce20b654\") " pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629752 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9923db54-633b-4725-87f8-384fa9feac18-console-serving-cert\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629775 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cae19df-e9ee-4a4c-b00e-814682583af1-serving-cert\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629797 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-config\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629818 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8101d279-dc32-48e9-80c8-52ea60394ca3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629838 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/097c4dd5-c85f-447c-9448-5969f491f49d-srv-cert\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: \"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629858 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/266ce950-00f4-440d-9196-6a4ab41404ea-audit-dir\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.629973 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/266ce950-00f4-440d-9196-6a4ab41404ea-audit-dir\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.630531 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " 
pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.630612 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.630643 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/4468bda1-5b59-48d8-836b-87faa7f35726-config\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.630926 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.630922 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/4468bda1-5b59-48d8-836b-87faa7f35726-serving-cert\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.630927 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/9923db54-633b-4725-87f8-384fa9feac18-oauth-serving-cert\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.631219 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8101d279-dc32-48e9-80c8-52ea60394ca3-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.631534 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.632177 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.632192 
4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.632842 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.633055 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b76bd594-1b9f-4aff-bde0-390b917fbf5e-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.633464 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.633922 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/9923db54-633b-4725-87f8-384fa9feac18-console-serving-cert\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.635313 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b22715b1-a39f-4f63-a05d-4f49ce20b654-metrics-tls\") pod \"dns-operator-744455d44c-sfbdd\" (UID: \"b22715b1-a39f-4f63-a05d-4f49ce20b654\") " pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.639448 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/af02ba00-7c73-4bc3-a341-5dac59a49e12-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: \"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.639771 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6264f924-12b0-4afd-a7c3-ad6428ebfc79-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.642571 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: 
\"kubernetes.io/secret/9923db54-633b-4725-87f8-384fa9feac18-console-oauth-config\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.660831 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.692751 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.699817 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.719806 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730727 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cae19df-e9ee-4a4c-b00e-814682583af1-serving-cert\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730760 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/097c4dd5-c85f-447c-9448-5969f491f49d-srv-cert\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: \"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730780 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9vrff\" (UniqueName: \"kubernetes.io/projected/1cae19df-e9ee-4a4c-b00e-814682583af1-kube-api-access-9vrff\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730799 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-signing-key\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730827 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8ad397b-3500-4791-b005-c6cc5d83e8b1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: \"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730846 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4mhf\" (UniqueName: \"kubernetes.io/projected/0fcf11f0-0a26-4ddd-a603-70ace7390469-kube-api-access-m4mhf\") pod \"multus-admission-controller-857f4d67dd-45qgf\" (UID: \"0fcf11f0-0a26-4ddd-a603-70ace7390469\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" Nov 25 10:34:02 
crc kubenswrapper[4702]: I1125 10:34:02.730860 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pl8nz\" (UniqueName: \"kubernetes.io/projected/1c0109c0-09bf-407d-b336-e3ff9f6ecea6-kube-api-access-pl8nz\") pod \"package-server-manager-789f6589d5-bqhvf\" (UID: \"1c0109c0-09bf-407d-b336-e3ff9f6ecea6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730877 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cae19df-e9ee-4a4c-b00e-814682583af1-config\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730914 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-config\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730936 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-client-ca\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730951 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/097c4dd5-c85f-447c-9448-5969f491f49d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: \"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730975 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0109c0-09bf-407d-b336-e3ff9f6ecea6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-bqhvf\" (UID: \"1c0109c0-09bf-407d-b336-e3ff9f6ecea6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.730993 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2mbc9\" (UniqueName: \"kubernetes.io/projected/097c4dd5-c85f-447c-9448-5969f491f49d-kube-api-access-2mbc9\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: \"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731019 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9402a523-ed8c-499b-bac9-0a0d6598ef52-serving-cert\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731036 4702 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7vxs\" (UniqueName: \"kubernetes.io/projected/9402a523-ed8c-499b-bac9-0a0d6598ef52-kube-api-access-h7vxs\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731055 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tp5x6\" (UniqueName: \"kubernetes.io/projected/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-kube-api-access-tp5x6\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731080 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pwg8\" (UniqueName: \"kubernetes.io/projected/5d45eacc-c698-4f1e-bccd-05d88696c983-kube-api-access-5pwg8\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731103 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8ad397b-3500-4791-b005-c6cc5d83e8b1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: \"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731143 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0fcf11f0-0a26-4ddd-a603-70ace7390469-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-45qgf\" (UID: \"0fcf11f0-0a26-4ddd-a603-70ace7390469\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731159 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-signing-cabundle\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731175 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5d45eacc-c698-4f1e-bccd-05d88696c983-srv-cert\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731191 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5d45eacc-c698-4f1e-bccd-05d88696c983-profile-collector-cert\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731207 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xhgtr\" (UniqueName: 
\"kubernetes.io/projected/8138f6f9-e4df-436f-9b58-b9b3f3e80b26-kube-api-access-xhgtr\") pod \"migrator-59844c95c7-5klvj\" (UID: \"8138f6f9-e4df-436f-9b58-b9b3f3e80b26\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731269 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8ad397b-3500-4791-b005-c6cc5d83e8b1-config\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: \"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.731975 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-config\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.734476 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9402a523-ed8c-499b-bac9-0a0d6598ef52-serving-cert\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.740015 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.745157 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-client-ca\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.759361 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.780156 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.800810 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.819685 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.824201 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/0fcf11f0-0a26-4ddd-a603-70ace7390469-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-45qgf\" (UID: \"0fcf11f0-0a26-4ddd-a603-70ace7390469\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.840188 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Nov 25 
10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.859037 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.864746 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/d8ad397b-3500-4791-b005-c6cc5d83e8b1-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: \"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.879417 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.882894 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d8ad397b-3500-4791-b005-c6cc5d83e8b1-config\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: \"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.899506 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.919150 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.939990 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.971129 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.979650 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 25 10:34:02 crc kubenswrapper[4702]: I1125 10:34:02.999171 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.019864 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.040398 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.059198 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.079962 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.099802 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.119985 4702 reflector.go:368] Caches populated 
for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.139876 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.144121 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/097c4dd5-c85f-447c-9448-5969f491f49d-srv-cert\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: \"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.159935 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.179740 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.200639 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.204910 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/5d45eacc-c698-4f1e-bccd-05d88696c983-srv-cert\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.221230 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.224506 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/5d45eacc-c698-4f1e-bccd-05d88696c983-profile-collector-cert\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.226268 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/097c4dd5-c85f-447c-9448-5969f491f49d-profile-collector-cert\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: \"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.240839 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.245000 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/1c0109c0-09bf-407d-b336-e3ff9f6ecea6-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-bqhvf\" (UID: \"1c0109c0-09bf-407d-b336-e3ff9f6ecea6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.260562 4702 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.285112 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.299574 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.320733 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.324659 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-signing-key\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.339846 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.342772 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-signing-cabundle\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.360026 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.380793 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.400250 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.428545 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.439065 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.459255 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.478839 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.497536 4702 request.go:700] Waited for 1.018536573s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-ingress-operator/secrets?fieldSelector=metadata.name%3Dingress-operator-dockercfg-7lnqk&limit=500&resourceVersion=0 Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.499515 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.520483 4702 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress-operator"/"metrics-tls" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.546595 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.559622 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.580223 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.599738 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.619324 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.622209 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1cae19df-e9ee-4a4c-b00e-814682583af1-config\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.640186 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.660354 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.664504 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1cae19df-e9ee-4a4c-b00e-814682583af1-serving-cert\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.679555 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.700016 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.720352 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.739273 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.759845 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.780235 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.800175 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.821338 4702 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-ingress"/"router-metrics-certs-default" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.840416 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.859482 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.880224 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.899558 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.919504 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.960563 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dv94c\" (UniqueName: \"kubernetes.io/projected/8e260cd3-afc7-4f82-90de-e3cd459cc79a-kube-api-access-dv94c\") pod \"apiserver-7bbb656c7d-btsrc\" (UID: \"8e260cd3-afc7-4f82-90de-e3cd459cc79a\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.977055 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-z8pmz\" (UniqueName: \"kubernetes.io/projected/1683b7f6-ecde-4865-ace5-0d570070451d-kube-api-access-z8pmz\") pod \"cluster-samples-operator-665b6dd947-6fn9w\" (UID: \"1683b7f6-ecde-4865-ace5-0d570070451d\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" Nov 25 10:34:03 crc kubenswrapper[4702]: I1125 10:34:03.999214 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gnlfz\" (UniqueName: \"kubernetes.io/projected/738f53e3-118f-44e8-9b32-baeff939bef4-kube-api-access-gnlfz\") pod \"apiserver-76f77b778f-fh9fl\" (UID: \"738f53e3-118f-44e8-9b32-baeff939bef4\") " pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.019122 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbkpc\" (UniqueName: \"kubernetes.io/projected/843a15b4-ea74-48d1-8b1a-b8c64ddb91cf-kube-api-access-fbkpc\") pod \"console-operator-58897d9998-b2ph2\" (UID: \"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf\") " pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.036893 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6r9w4\" (UniqueName: \"kubernetes.io/projected/39e51e54-1814-4e9d-a6e0-42657e63a2c5-kube-api-access-6r9w4\") pod \"controller-manager-879f6c89f-kfhfz\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.038807 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.059490 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 25 10:34:04 crc 
kubenswrapper[4702]: I1125 10:34:04.080033 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.099641 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.119123 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.133156 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.139225 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.178981 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.194915 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.200013 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.213524 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.219412 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.225768 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-b2ph2" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.239844 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.257494 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.259645 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.279125 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.299021 4702 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.319548 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.339384 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.360278 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.365963 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-fh9fl"] Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.379391 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Nov 25 10:34:04 crc kubenswrapper[4702]: W1125 10:34:04.380301 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod738f53e3_118f_44e8_9b32_baeff939bef4.slice/crio-b4647856d5c8156fd2a5a116b66830f60789371082365d1bf04677df2e145de7 WatchSource:0}: Error finding container b4647856d5c8156fd2a5a116b66830f60789371082365d1bf04677df2e145de7: Status 404 returned error can't find the container with id b4647856d5c8156fd2a5a116b66830f60789371082365d1bf04677df2e145de7 Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.399593 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.404829 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc"] Nov 25 10:34:04 crc kubenswrapper[4702]: W1125 10:34:04.418428 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod8e260cd3_afc7_4f82_90de_e3cd459cc79a.slice/crio-3719e10a7708f55a088da592e3e187c6635936694d085f26252acf6d7cb8fcf9 WatchSource:0}: Error finding container 3719e10a7708f55a088da592e3e187c6635936694d085f26252acf6d7cb8fcf9: Status 404 returned error can't find the container with id 3719e10a7708f55a088da592e3e187c6635936694d085f26252acf6d7cb8fcf9 Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.424183 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.438446 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w"] Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.440311 4702 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.452361 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kfhfz"] Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.459797 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Nov 25 10:34:04 crc kubenswrapper[4702]: W1125 10:34:04.472397 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod39e51e54_1814_4e9d_a6e0_42657e63a2c5.slice/crio-9f8ea866307115a6e976a20f4f451ebf864623bbd502164edfe4f220bd54a1ab WatchSource:0}: Error finding container 9f8ea866307115a6e976a20f4f451ebf864623bbd502164edfe4f220bd54a1ab: Status 404 returned error can't find the container with id 9f8ea866307115a6e976a20f4f451ebf864623bbd502164edfe4f220bd54a1ab Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.480206 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-b2ph2"] Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.497835 4702 request.go:700] Waited for 1.874580876s due to client-side throttling, not priority and fairness, request: POST:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/serviceaccounts/oauth-openshift/token Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.502035 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kdhcn\" (UniqueName: \"kubernetes.io/projected/6264f924-12b0-4afd-a7c3-ad6428ebfc79-kube-api-access-kdhcn\") pod \"kube-storage-version-migrator-operator-b67b599dd-5h6hm\" (UID: \"6264f924-12b0-4afd-a7c3-ad6428ebfc79\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.514051 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5jj9\" (UniqueName: \"kubernetes.io/projected/266ce950-00f4-440d-9196-6a4ab41404ea-kube-api-access-n5jj9\") pod \"oauth-openshift-558db77b4-w2kdw\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.538300 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vth9b\" (UniqueName: \"kubernetes.io/projected/b22715b1-a39f-4f63-a05d-4f49ce20b654-kube-api-access-vth9b\") pod \"dns-operator-744455d44c-sfbdd\" (UID: \"b22715b1-a39f-4f63-a05d-4f49ce20b654\") " pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.552557 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lmzcq\" (UniqueName: \"kubernetes.io/projected/b76bd594-1b9f-4aff-bde0-390b917fbf5e-kube-api-access-lmzcq\") pod \"openshift-apiserver-operator-796bbdcf4f-k86kg\" (UID: \"b76bd594-1b9f-4aff-bde0-390b917fbf5e\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.573399 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvdp7\" (UniqueName: \"kubernetes.io/projected/af02ba00-7c73-4bc3-a341-5dac59a49e12-kube-api-access-xvdp7\") pod \"machine-api-operator-5694c8668f-7s7k2\" (UID: 
\"af02ba00-7c73-4bc3-a341-5dac59a49e12\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.576466 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.593103 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lk587\" (UniqueName: \"kubernetes.io/projected/de0d09a1-663f-4a61-a609-b74ee9eb887e-kube-api-access-lk587\") pod \"openshift-config-operator-7777fb866f-cf8hl\" (UID: \"de0d09a1-663f-4a61-a609-b74ee9eb887e\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.619292 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9bsfm\" (UniqueName: \"kubernetes.io/projected/8101d279-dc32-48e9-80c8-52ea60394ca3-kube-api-access-9bsfm\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.640024 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8101d279-dc32-48e9-80c8-52ea60394ca3-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-n7wzw\" (UID: \"8101d279-dc32-48e9-80c8-52ea60394ca3\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.658134 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zx4q\" (UniqueName: \"kubernetes.io/projected/4468bda1-5b59-48d8-836b-87faa7f35726-kube-api-access-8zx4q\") pod \"authentication-operator-69f744f599-vnwhc\" (UID: \"4468bda1-5b59-48d8-836b-87faa7f35726\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.668741 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.682851 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.688767 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5chxv\" (UniqueName: \"kubernetes.io/projected/9923db54-633b-4725-87f8-384fa9feac18-kube-api-access-5chxv\") pod \"console-f9d7485db-zh82l\" (UID: \"9923db54-633b-4725-87f8-384fa9feac18\") " pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.689095 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.694657 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zcp4k\" (UniqueName: \"kubernetes.io/projected/a8621fa2-6cb1-4e0e-b1ed-3f254430262b-kube-api-access-zcp4k\") pod \"downloads-7954f5f757-2r4cg\" (UID: \"a8621fa2-6cb1-4e0e-b1ed-3f254430262b\") " pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.696283 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.704314 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.721850 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.733030 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hr4cq\" (UniqueName: \"kubernetes.io/projected/c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4-kube-api-access-hr4cq\") pod \"openshift-controller-manager-operator-756b6f6bc6-fld29\" (UID: \"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.747604 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-7s7k2"] Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.754295 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4mhf\" (UniqueName: \"kubernetes.io/projected/0fcf11f0-0a26-4ddd-a603-70ace7390469-kube-api-access-m4mhf\") pod \"multus-admission-controller-857f4d67dd-45qgf\" (UID: \"0fcf11f0-0a26-4ddd-a603-70ace7390469\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" Nov 25 10:34:04 crc kubenswrapper[4702]: W1125 10:34:04.769925 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podaf02ba00_7c73_4bc3_a341_5dac59a49e12.slice/crio-962ac2f0e02d6a5c3b3cba98660437bed9d2e54aa4390e8f590f11f305f92412 WatchSource:0}: Error finding container 962ac2f0e02d6a5c3b3cba98660437bed9d2e54aa4390e8f590f11f305f92412: Status 404 returned error can't find the container with id 962ac2f0e02d6a5c3b3cba98660437bed9d2e54aa4390e8f590f11f305f92412 Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.777395 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9vrff\" (UniqueName: \"kubernetes.io/projected/1cae19df-e9ee-4a4c-b00e-814682583af1-kube-api-access-9vrff\") pod \"service-ca-operator-777779d784-gg8sb\" (UID: \"1cae19df-e9ee-4a4c-b00e-814682583af1\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.817504 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/d8ad397b-3500-4791-b005-c6cc5d83e8b1-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-cr5t9\" (UID: 
\"d8ad397b-3500-4791-b005-c6cc5d83e8b1\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.819809 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pl8nz\" (UniqueName: \"kubernetes.io/projected/1c0109c0-09bf-407d-b336-e3ff9f6ecea6-kube-api-access-pl8nz\") pod \"package-server-manager-789f6589d5-bqhvf\" (UID: \"1c0109c0-09bf-407d-b336-e3ff9f6ecea6\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.820081 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.838262 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2mbc9\" (UniqueName: \"kubernetes.io/projected/097c4dd5-c85f-447c-9448-5969f491f49d-kube-api-access-2mbc9\") pod \"olm-operator-6b444d44fb-5mp5k\" (UID: \"097c4dd5-c85f-447c-9448-5969f491f49d\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.851345 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.857747 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.860935 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h7vxs\" (UniqueName: \"kubernetes.io/projected/9402a523-ed8c-499b-bac9-0a0d6598ef52-kube-api-access-h7vxs\") pod \"route-controller-manager-6576b87f9c-n4r8n\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.876609 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tp5x6\" (UniqueName: \"kubernetes.io/projected/b96ae9ba-e486-417d-9d4b-f3a7ad987de3-kube-api-access-tp5x6\") pod \"service-ca-9c57cc56f-6ntqc\" (UID: \"b96ae9ba-e486-417d-9d4b-f3a7ad987de3\") " pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.872324 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.891607 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.902931 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pwg8\" (UniqueName: \"kubernetes.io/projected/5d45eacc-c698-4f1e-bccd-05d88696c983-kube-api-access-5pwg8\") pod \"catalog-operator-68c6474976-995q8\" (UID: \"5d45eacc-c698-4f1e-bccd-05d88696c983\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.923867 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xhgtr\" (UniqueName: \"kubernetes.io/projected/8138f6f9-e4df-436f-9b58-b9b3f3e80b26-kube-api-access-xhgtr\") pod \"migrator-59844c95c7-5klvj\" (UID: \"8138f6f9-e4df-436f-9b58-b9b3f3e80b26\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.929092 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.932321 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.933404 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.941458 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-sfbdd"] Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973160 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-auth-proxy-config\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973207 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb58e8de-46f8-426c-a656-c8d4ad37950e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973236 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-config\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973253 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-metrics-certs\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:04 crc 
kubenswrapper[4702]: I1125 10:34:04.973287 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7835a751-7b09-4e97-94a6-1f920dc0fc15-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973324 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zjvgl\" (UniqueName: \"kubernetes.io/projected/d5393776-7502-4849-b157-6899da0bf181-kube-api-access-zjvgl\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973347 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b7m89\" (UniqueName: \"kubernetes.io/projected/eb58e8de-46f8-426c-a656-c8d4ad37950e-kube-api-access-b7m89\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973376 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jpwbr\" (UniqueName: \"kubernetes.io/projected/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-kube-api-access-jpwbr\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973392 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4dmlq\" (UniqueName: \"kubernetes.io/projected/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-kube-api-access-4dmlq\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973406 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973422 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwsjk\" (UniqueName: \"kubernetes.io/projected/6c924c09-c172-4f11-91a5-86cb5949e5cd-kube-api-access-pwsjk\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973438 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6c924c09-c172-4f11-91a5-86cb5949e5cd-tmpfs\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973455 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7835a751-7b09-4e97-94a6-1f920dc0fc15-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973476 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-machine-approver-tls\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973499 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb2736bf-dd62-4b2a-982c-5aaa95671814-images\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973524 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74207563-11c3-4723-8375-7a61d6f27733-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973550 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6c924c09-c172-4f11-91a5-86cb5949e5cd-webhook-cert\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973590 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-ca\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.973605 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmp4c\" (UniqueName: \"kubernetes.io/projected/fb2736bf-dd62-4b2a-982c-5aaa95671814-kube-api-access-hmp4c\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.975374 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.975562 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fb2736bf-dd62-4b2a-982c-5aaa95671814-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.975913 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-config\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.975968 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb58e8de-46f8-426c-a656-c8d4ad37950e-proxy-tls\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.976653 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.976983 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-registry-tls\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977006 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-bound-sa-token\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977024 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7835a751-7b09-4e97-94a6-1f920dc0fc15-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977320 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " 
pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977350 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-trusted-ca\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977386 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5lxns\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-kube-api-access-5lxns\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977415 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-metrics-tls\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977434 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-stats-auth\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977486 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d5393776-7502-4849-b157-6899da0bf181-service-ca-bundle\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977509 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977525 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3932611-ca66-44e6-bcd8-5d40328453b4-serving-cert\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977584 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-default-certificate\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977618 
4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l9t89\" (UniqueName: \"kubernetes.io/projected/e22e5523-d9e6-4257-bd76-b216c4bee1be-kube-api-access-l9t89\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977657 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6c924c09-c172-4f11-91a5-86cb5949e5cd-apiservice-cert\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977727 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-registry-certificates\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977742 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-trusted-ca\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977823 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74207563-11c3-4723-8375-7a61d6f27733-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977839 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-client\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977877 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-service-ca\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.977954 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fb2736bf-dd62-4b2a-982c-5aaa95671814-proxy-tls\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:04 crc kubenswrapper[4702]: E1125 10:34:04.978948 4702 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:05.478934359 +0000 UTC m=+142.845530049 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:04 crc kubenswrapper[4702]: I1125 10:34:04.979032 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xvpch\" (UniqueName: \"kubernetes.io/projected/e3932611-ca66-44e6-bcd8-5d40328453b4-kube-api-access-xvpch\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.024788 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.080743 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.080942 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7835a751-7b09-4e97-94a6-1f920dc0fc15-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.081021 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:05.580994652 +0000 UTC m=+142.947590341 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081072 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32c118a6-a92d-47fb-8169-bccbb5e51072-config-volume\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081133 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/cf4f2a23-8387-4837-8635-0e76a2d340a4-certs\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081157 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zjvgl\" (UniqueName: \"kubernetes.io/projected/d5393776-7502-4849-b157-6899da0bf181-kube-api-access-zjvgl\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081208 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b7m89\" (UniqueName: \"kubernetes.io/projected/eb58e8de-46f8-426c-a656-c8d4ad37950e-kube-api-access-b7m89\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081247 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32c118a6-a92d-47fb-8169-bccbb5e51072-secret-volume\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081273 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jpwbr\" (UniqueName: \"kubernetes.io/projected/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-kube-api-access-jpwbr\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081657 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081716 4702 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dmlq\" (UniqueName: \"kubernetes.io/projected/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-kube-api-access-4dmlq\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081737 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081759 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwsjk\" (UniqueName: \"kubernetes.io/projected/6c924c09-c172-4f11-91a5-86cb5949e5cd-kube-api-access-pwsjk\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081791 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/cf4f2a23-8387-4837-8635-0e76a2d340a4-node-bootstrap-token\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081813 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zbjwp\" (UniqueName: \"kubernetes.io/projected/26c8117b-b4b8-4563-980c-150a35aaf727-kube-api-access-zbjwp\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081849 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6c924c09-c172-4f11-91a5-86cb5949e5cd-tmpfs\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081868 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-machine-approver-tls\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081883 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7835a751-7b09-4e97-94a6-1f920dc0fc15-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081954 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74207563-11c3-4723-8375-7a61d6f27733-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.081985 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb2736bf-dd62-4b2a-982c-5aaa95671814-images\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082036 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6c924c09-c172-4f11-91a5-86cb5949e5cd-webhook-cert\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082057 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082086 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-56fr6\" (UniqueName: \"kubernetes.io/projected/cf4f2a23-8387-4837-8635-0e76a2d340a4-kube-api-access-56fr6\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082107 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-ca\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082121 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmp4c\" (UniqueName: \"kubernetes.io/projected/fb2736bf-dd62-4b2a-982c-5aaa95671814-kube-api-access-hmp4c\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082137 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fb2736bf-dd62-4b2a-982c-5aaa95671814-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082160 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-config\") pod 
\"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082175 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d2bb7ed5-d0b7-4157-a889-5331ba873fde-config-volume\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082201 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb58e8de-46f8-426c-a656-c8d4ad37950e-proxy-tls\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082233 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-socket-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082248 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-registration-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082294 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082323 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-registry-tls\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082349 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-bound-sa-token\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082363 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7835a751-7b09-4e97-94a6-1f920dc0fc15-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082410 4702 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.082432 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-trusted-ca\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083775 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5lxns\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-kube-api-access-5lxns\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083795 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d2bb7ed5-d0b7-4157-a889-5331ba873fde-metrics-tls\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083817 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-metrics-tls\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083841 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d5393776-7502-4849-b157-6899da0bf181-service-ca-bundle\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083856 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-stats-auth\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083870 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-csi-data-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083888 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3932611-ca66-44e6-bcd8-5d40328453b4-serving-cert\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " 
pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083922 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-default-certificate\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083927 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74207563-11c3-4723-8375-7a61d6f27733-ca-trust-extracted\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083949 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.083999 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l9t89\" (UniqueName: \"kubernetes.io/projected/e22e5523-d9e6-4257-bd76-b216c4bee1be-kube-api-access-l9t89\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084025 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6c924c09-c172-4f11-91a5-86cb5949e5cd-apiservice-cert\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084059 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-registry-certificates\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084076 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-trusted-ca\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084093 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wqmst\" (UniqueName: \"kubernetes.io/projected/853aac53-23c5-4f78-a291-bc82dff9e338-kube-api-access-wqmst\") pod \"control-plane-machine-set-operator-78cbb6b69f-4jnl2\" (UID: \"853aac53-23c5-4f78-a291-bc82dff9e338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084122 
4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74207563-11c3-4723-8375-7a61d6f27733-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084136 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-client\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084149 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fb8vj\" (UniqueName: \"kubernetes.io/projected/d2bb7ed5-d0b7-4157-a889-5331ba873fde-kube-api-access-fb8vj\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084176 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-plugins-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084190 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/66d2db93-b876-4750-86ff-5887f35a7f84-cert\") pod \"ingress-canary-6qwtj\" (UID: \"66d2db93-b876-4750-86ff-5887f35a7f84\") " pod="openshift-ingress-canary/ingress-canary-6qwtj" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084230 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-service-ca\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084245 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fb2736bf-dd62-4b2a-982c-5aaa95671814-proxy-tls\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084261 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-config\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084277 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-mountpoint-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: 
\"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084310 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xvpch\" (UniqueName: \"kubernetes.io/projected/e3932611-ca66-44e6-bcd8-5d40328453b4-kube-api-access-xvpch\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084357 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntwj9\" (UniqueName: \"kubernetes.io/projected/32c118a6-a92d-47fb-8169-bccbb5e51072-kube-api-access-ntwj9\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084378 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-auth-proxy-config\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084404 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/853aac53-23c5-4f78-a291-bc82dff9e338-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4jnl2\" (UID: \"853aac53-23c5-4f78-a291-bc82dff9e338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084451 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb58e8de-46f8-426c-a656-c8d4ad37950e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084475 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-config\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084493 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-metrics-certs\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.084512 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n4ll\" (UniqueName: \"kubernetes.io/projected/66d2db93-b876-4750-86ff-5887f35a7f84-kube-api-access-8n4ll\") pod \"ingress-canary-6qwtj\" (UID: \"66d2db93-b876-4750-86ff-5887f35a7f84\") " 
pod="openshift-ingress-canary/ingress-canary-6qwtj" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.085925 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7835a751-7b09-4e97-94a6-1f920dc0fc15-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.086115 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/fb2736bf-dd62-4b2a-982c-5aaa95671814-images\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.086229 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.087469 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/6c924c09-c172-4f11-91a5-86cb5949e5cd-tmpfs\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.088050 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-ca\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.089568 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/eb58e8de-46f8-426c-a656-c8d4ad37950e-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.089948 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-config\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.090126 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-auth-proxy-config\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.090205 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: 
\"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-registry-certificates\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.092261 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-trusted-ca\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.092537 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-trusted-ca\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.092582 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-service-ca\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.093271 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:05.593248891 +0000 UTC m=+142.959844610 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.093349 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/d5393776-7502-4849-b157-6899da0bf181-service-ca-bundle\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.094354 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fb2736bf-dd62-4b2a-982c-5aaa95671814-auth-proxy-config\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.096198 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.104028 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.104128 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e3932611-ca66-44e6-bcd8-5d40328453b4-config\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.105579 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/6c924c09-c172-4f11-91a5-86cb5949e5cd-apiservice-cert\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.106362 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74207563-11c3-4723-8375-7a61d6f27733-installation-pull-secrets\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.107137 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.108457 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7835a751-7b09-4e97-94a6-1f920dc0fc15-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.108492 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e3932611-ca66-44e6-bcd8-5d40328453b4-serving-cert\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.113640 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fb2736bf-dd62-4b2a-982c-5aaa95671814-proxy-tls\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.114384 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/eb58e8de-46f8-426c-a656-c8d4ad37950e-proxy-tls\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.114630 4702 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/e3932611-ca66-44e6-bcd8-5d40328453b4-etcd-client\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.114845 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-stats-auth\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.116425 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-metrics-tls\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.116529 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-machine-approver-tls\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.116675 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.117688 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-registry-tls\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.117696 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/6c924c09-c172-4f11-91a5-86cb5949e5cd-webhook-cert\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.118642 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-default-certificate\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.119148 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b2ph2" event={"ID":"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf","Type":"ContainerStarted","Data":"0e338830eda6d0ff06c92c4854a412dcccbba44d757fa96a67d90f2acf9a6c0a"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.119190 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-b2ph2" 
event={"ID":"843a15b4-ea74-48d1-8b1a-b8c64ddb91cf","Type":"ContainerStarted","Data":"e595d5fd9da9847bbc22926d7a9537f99884d50714e785d053cd4beda2eaadad"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.122529 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/d5393776-7502-4849-b157-6899da0bf181-metrics-certs\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.123041 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" event={"ID":"1683b7f6-ecde-4865-ace5-0d570070451d","Type":"ContainerStarted","Data":"966cc88eb5edede5f80d4f849f38254d9cf3cb370bec0b538ef238eb8dd4e14c"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.123069 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" event={"ID":"1683b7f6-ecde-4865-ace5-0d570070451d","Type":"ContainerStarted","Data":"1b3297a48e210324f5fa6f01030da2ce4e85330e07362b99924c013d30d0ac67"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.128632 4702 generic.go:334] "Generic (PLEG): container finished" podID="738f53e3-118f-44e8-9b32-baeff939bef4" containerID="5e1229c4f8c05d34c7a785faebb2a8eee8c122f820669257dc8e0ee1799ce0d8" exitCode=0 Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.128700 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" event={"ID":"738f53e3-118f-44e8-9b32-baeff939bef4","Type":"ContainerDied","Data":"5e1229c4f8c05d34c7a785faebb2a8eee8c122f820669257dc8e0ee1799ce0d8"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.128725 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" event={"ID":"738f53e3-118f-44e8-9b32-baeff939bef4","Type":"ContainerStarted","Data":"b4647856d5c8156fd2a5a116b66830f60789371082365d1bf04677df2e145de7"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.133866 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" event={"ID":"39e51e54-1814-4e9d-a6e0-42657e63a2c5","Type":"ContainerStarted","Data":"6e6710fb8fd130390654694eee50a1fdfaa138aa109365b9f656e3db821e0ed9"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.133927 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" event={"ID":"39e51e54-1814-4e9d-a6e0-42657e63a2c5","Type":"ContainerStarted","Data":"9f8ea866307115a6e976a20f4f451ebf864623bbd502164edfe4f220bd54a1ab"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.135324 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" event={"ID":"b22715b1-a39f-4f63-a05d-4f49ce20b654","Type":"ContainerStarted","Data":"bc41dd652439be9d890675feb910cf320bd24eaecb451e47e3a891c63ad5ee04"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.136261 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" event={"ID":"af02ba00-7c73-4bc3-a341-5dac59a49e12","Type":"ContainerStarted","Data":"962ac2f0e02d6a5c3b3cba98660437bed9d2e54aa4390e8f590f11f305f92412"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.137284 
4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" event={"ID":"de0d09a1-663f-4a61-a609-b74ee9eb887e","Type":"ContainerStarted","Data":"a06af1f071165bd0ab2a37213ec29b13fca79de26c81564fc044973369db2395"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.138378 4702 generic.go:334] "Generic (PLEG): container finished" podID="8e260cd3-afc7-4f82-90de-e3cd459cc79a" containerID="5909975624df1293574c06072f47db59fcdb01890decc77d0c665cb2ec8b6163" exitCode=0 Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.138412 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" event={"ID":"8e260cd3-afc7-4f82-90de-e3cd459cc79a","Type":"ContainerDied","Data":"5909975624df1293574c06072f47db59fcdb01890decc77d0c665cb2ec8b6163"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.138431 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" event={"ID":"8e260cd3-afc7-4f82-90de-e3cd459cc79a","Type":"ContainerStarted","Data":"3719e10a7708f55a088da592e3e187c6635936694d085f26252acf6d7cb8fcf9"} Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.145845 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.167836 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-bound-sa-token\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.168430 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zjvgl\" (UniqueName: \"kubernetes.io/projected/d5393776-7502-4849-b157-6899da0bf181-kube-api-access-zjvgl\") pod \"router-default-5444994796-t7mgq\" (UID: \"d5393776-7502-4849-b157-6899da0bf181\") " pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.170509 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" Nov 25 10:34:05 crc kubenswrapper[4702]: W1125 10:34:05.176542 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6264f924_12b0_4afd_a7c3_ad6428ebfc79.slice/crio-9dcae2862b25a95464fe2491874ef73ab1496cdf284dbfcabce8c84f25fd9e05 WatchSource:0}: Error finding container 9dcae2862b25a95464fe2491874ef73ab1496cdf284dbfcabce8c84f25fd9e05: Status 404 returned error can't find the container with id 9dcae2862b25a95464fe2491874ef73ab1496cdf284dbfcabce8c84f25fd9e05 Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.185561 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/7835a751-7b09-4e97-94a6-1f920dc0fc15-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-6x8kb\" (UID: \"7835a751-7b09-4e97-94a6-1f920dc0fc15\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.185666 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.185873 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fb8vj\" (UniqueName: \"kubernetes.io/projected/d2bb7ed5-d0b7-4157-a889-5331ba873fde-kube-api-access-fb8vj\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.185923 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-plugins-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.185951 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/66d2db93-b876-4750-86ff-5887f35a7f84-cert\") pod \"ingress-canary-6qwtj\" (UID: \"66d2db93-b876-4750-86ff-5887f35a7f84\") " pod="openshift-ingress-canary/ingress-canary-6qwtj" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.185979 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-config\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186000 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-mountpoint-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186044 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-ntwj9\" (UniqueName: \"kubernetes.io/projected/32c118a6-a92d-47fb-8169-bccbb5e51072-kube-api-access-ntwj9\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186072 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/853aac53-23c5-4f78-a291-bc82dff9e338-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4jnl2\" (UID: \"853aac53-23c5-4f78-a291-bc82dff9e338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186103 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n4ll\" (UniqueName: \"kubernetes.io/projected/66d2db93-b876-4750-86ff-5887f35a7f84-kube-api-access-8n4ll\") pod \"ingress-canary-6qwtj\" (UID: \"66d2db93-b876-4750-86ff-5887f35a7f84\") " pod="openshift-ingress-canary/ingress-canary-6qwtj" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186131 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32c118a6-a92d-47fb-8169-bccbb5e51072-config-volume\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186165 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/cf4f2a23-8387-4837-8635-0e76a2d340a4-certs\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186199 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32c118a6-a92d-47fb-8169-bccbb5e51072-secret-volume\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186241 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186274 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/cf4f2a23-8387-4837-8635-0e76a2d340a4-node-bootstrap-token\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186297 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zbjwp\" (UniqueName: \"kubernetes.io/projected/26c8117b-b4b8-4563-980c-150a35aaf727-kube-api-access-zbjwp\") pod 
\"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186329 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186352 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-56fr6\" (UniqueName: \"kubernetes.io/projected/cf4f2a23-8387-4837-8635-0e76a2d340a4-kube-api-access-56fr6\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186386 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d2bb7ed5-d0b7-4157-a889-5331ba873fde-config-volume\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186408 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-socket-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186429 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-registration-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186492 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d2bb7ed5-d0b7-4157-a889-5331ba873fde-metrics-tls\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186516 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-csi-data-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.186575 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wqmst\" (UniqueName: \"kubernetes.io/projected/853aac53-23c5-4f78-a291-bc82dff9e338-kube-api-access-wqmst\") pod \"control-plane-machine-set-operator-78cbb6b69f-4jnl2\" (UID: \"853aac53-23c5-4f78-a291-bc82dff9e338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.186821 4702 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:05.686802201 +0000 UTC m=+143.053397900 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.187116 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-plugins-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.196559 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-config\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.196657 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-mountpoint-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.196666 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/66d2db93-b876-4750-86ff-5887f35a7f84-cert\") pod \"ingress-canary-6qwtj\" (UID: \"66d2db93-b876-4750-86ff-5887f35a7f84\") " pod="openshift-ingress-canary/ingress-canary-6qwtj" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.196814 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-socket-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.196880 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-registration-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.197093 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/d2bb7ed5-d0b7-4157-a889-5331ba873fde-config-volume\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.197479 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: 
\"kubernetes.io/host-path/26c8117b-b4b8-4563-980c-150a35aaf727-csi-data-dir\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.197725 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.198685 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32c118a6-a92d-47fb-8169-bccbb5e51072-config-volume\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.200633 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/cf4f2a23-8387-4837-8635-0e76a2d340a4-node-bootstrap-token\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.201479 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/d2bb7ed5-d0b7-4157-a889-5331ba873fde-metrics-tls\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.209439 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/853aac53-23c5-4f78-a291-bc82dff9e338-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-4jnl2\" (UID: \"853aac53-23c5-4f78-a291-bc82dff9e338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.209961 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.210416 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b7m89\" (UniqueName: \"kubernetes.io/projected/eb58e8de-46f8-426c-a656-c8d4ad37950e-kube-api-access-b7m89\") pod \"machine-config-controller-84d6567774-l6bw4\" (UID: \"eb58e8de-46f8-426c-a656-c8d4ad37950e\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.213687 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32c118a6-a92d-47fb-8169-bccbb5e51072-secret-volume\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.215043 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/cf4f2a23-8387-4837-8635-0e76a2d340a4-certs\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.221441 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jpwbr\" (UniqueName: \"kubernetes.io/projected/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-kube-api-access-jpwbr\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.239668 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dmlq\" (UniqueName: \"kubernetes.io/projected/d70c50e1-8a6b-4f9b-8d03-79eff4b911d4-kube-api-access-4dmlq\") pod \"machine-approver-56656f9798-j84mt\" (UID: \"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.265851 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwsjk\" (UniqueName: \"kubernetes.io/projected/6c924c09-c172-4f11-91a5-86cb5949e5cd-kube-api-access-pwsjk\") pod \"packageserver-d55dfcdfc-t5vl4\" (UID: \"6c924c09-c172-4f11-91a5-86cb5949e5cd\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.276221 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.281175 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-w2kdw"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.288528 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.288933 4702 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:05.788919875 +0000 UTC m=+143.155515554 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.294120 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5lxns\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-kube-api-access-5lxns\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.296654 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-2r4cg"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.302399 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l9t89\" (UniqueName: \"kubernetes.io/projected/e22e5523-d9e6-4257-bd76-b216c4bee1be-kube-api-access-l9t89\") pod \"marketplace-operator-79b997595-d5g4l\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.322705 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/f2831aac-4bc0-46ae-8a5b-b7966380ad2e-bound-sa-token\") pod \"ingress-operator-5b745b69d9-qfr6v\" (UID: \"f2831aac-4bc0-46ae-8a5b-b7966380ad2e\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.328353 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.342648 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xvpch\" (UniqueName: \"kubernetes.io/projected/e3932611-ca66-44e6-bcd8-5d40328453b4-kube-api-access-xvpch\") pod \"etcd-operator-b45778765-8kd6d\" (UID: \"e3932611-ca66-44e6-bcd8-5d40328453b4\") " pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.355990 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmp4c\" (UniqueName: \"kubernetes.io/projected/fb2736bf-dd62-4b2a-982c-5aaa95671814-kube-api-access-hmp4c\") pod \"machine-config-operator-74547568cd-7n7vp\" (UID: \"fb2736bf-dd62-4b2a-982c-5aaa95671814\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: W1125 10:34:05.367137 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb76bd594_1b9f_4aff_bde0_390b917fbf5e.slice/crio-99b90b0dbd2bfde601840d90e7dec567ab676550825bcfd378aea61f82ced3de WatchSource:0}: Error finding container 99b90b0dbd2bfde601840d90e7dec567ab676550825bcfd378aea61f82ced3de: Status 404 returned error can't find the container with id 99b90b0dbd2bfde601840d90e7dec567ab676550825bcfd378aea61f82ced3de Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.389513 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.389675 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:05.889647227 +0000 UTC m=+143.256242916 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.390301 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.390685 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-25 10:34:05.890671168 +0000 UTC m=+143.257266857 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.396339 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wqmst\" (UniqueName: \"kubernetes.io/projected/853aac53-23c5-4f78-a291-bc82dff9e338-kube-api-access-wqmst\") pod \"control-plane-machine-set-operator-78cbb6b69f-4jnl2\" (UID: \"853aac53-23c5-4f78-a291-bc82dff9e338\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.412580 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.417120 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fb8vj\" (UniqueName: \"kubernetes.io/projected/d2bb7ed5-d0b7-4157-a889-5331ba873fde-kube-api-access-fb8vj\") pod \"dns-default-s8rd8\" (UID: \"d2bb7ed5-d0b7-4157-a889-5331ba873fde\") " pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.427553 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.433301 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.450062 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zbjwp\" (UniqueName: \"kubernetes.io/projected/26c8117b-b4b8-4563-980c-150a35aaf727-kube-api-access-zbjwp\") pod \"csi-hostpathplugin-74xrt\" (UID: \"26c8117b-b4b8-4563-980c-150a35aaf727\") " pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.457084 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.462094 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.474475 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntwj9\" (UniqueName: \"kubernetes.io/projected/32c118a6-a92d-47fb-8169-bccbb5e51072-kube-api-access-ntwj9\") pod \"collect-profiles-29401110-dcvld\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.474789 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n4ll\" (UniqueName: \"kubernetes.io/projected/66d2db93-b876-4750-86ff-5887f35a7f84-kube-api-access-8n4ll\") pod \"ingress-canary-6qwtj\" (UID: \"66d2db93-b876-4750-86ff-5887f35a7f84\") " pod="openshift-ingress-canary/ingress-canary-6qwtj" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.478157 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.485095 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.491156 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.491586 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:05.991567615 +0000 UTC m=+143.358163304 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.516951 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-56fr6\" (UniqueName: \"kubernetes.io/projected/cf4f2a23-8387-4837-8635-0e76a2d340a4-kube-api-access-56fr6\") pod \"machine-config-server-gn6ks\" (UID: \"cf4f2a23-8387-4837-8635-0e76a2d340a4\") " pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.534674 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b47e3ff7-ea18-461e-ae57-fdbeb817aa45-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-qpctp\" (UID: \"b47e3ff7-ea18-461e-ae57-fdbeb817aa45\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.537234 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.547210 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.558282 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-s8rd8" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.568260 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.580050 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-74xrt" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.584540 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-45qgf"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.590074 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-gn6ks" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.592151 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.593345 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.093327519 +0000 UTC m=+143.459923208 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.598062 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-6qwtj" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.613186 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" Nov 25 10:34:05 crc kubenswrapper[4702]: W1125 10:34:05.613656 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0fcf11f0_0a26_4ddd_a603_70ace7390469.slice/crio-f2c88e4d417dda25af14ec62adcb82f25c9a6ff0e567385ee880b455fc8baa56 WatchSource:0}: Error finding container f2c88e4d417dda25af14ec62adcb82f25c9a6ff0e567385ee880b455fc8baa56: Status 404 returned error can't find the container with id f2c88e4d417dda25af14ec62adcb82f25c9a6ff0e567385ee880b455fc8baa56 Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.615075 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-vnwhc"] Nov 25 10:34:05 crc kubenswrapper[4702]: W1125 10:34:05.620989 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc3d9f60f_f4a5_48c5_a80f_88fb44cff0e4.slice/crio-eba1d22457cfffb0df6dd71c5999850ed9dbfa3b471e0951099657e1feaa5392 WatchSource:0}: Error finding container eba1d22457cfffb0df6dd71c5999850ed9dbfa3b471e0951099657e1feaa5392: Status 404 returned error can't find the container with id eba1d22457cfffb0df6dd71c5999850ed9dbfa3b471e0951099657e1feaa5392 Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.624012 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.624061 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.638495 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.699289 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.699815 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.199792957 +0000 UTC m=+143.566388646 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.801768 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.804307 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.304286585 +0000 UTC m=+143.670882284 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.832047 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.856558 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-zh82l"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.873828 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.877023 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.907494 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.907748 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.40771043 +0000 UTC m=+143.774306129 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.907963 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:05 crc kubenswrapper[4702]: E1125 10:34:05.908618 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.408598108 +0000 UTC m=+143.775193797 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.942429 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp"] Nov 25 10:34:05 crc kubenswrapper[4702]: W1125 10:34:05.943867 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod097c4dd5_c85f_447c_9448_5969f491f49d.slice/crio-b3ce343d2e9f2abfed1c23a01281fcdfe0ab0430759138ec3af14973da56a667 WatchSource:0}: Error finding container b3ce343d2e9f2abfed1c23a01281fcdfe0ab0430759138ec3af14973da56a667: Status 404 returned error can't find the container with id b3ce343d2e9f2abfed1c23a01281fcdfe0ab0430759138ec3af14973da56a667 Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.949179 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-6ntqc"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.949216 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n"] Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.954721 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8"] Nov 25 10:34:05 crc kubenswrapper[4702]: W1125 10:34:05.975860 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb96ae9ba_e486_417d_9d4b_f3a7ad987de3.slice/crio-ada8a350c56b5d467426cb6077d07bd81493980d5a04e6538c259fe082854c25 WatchSource:0}: Error finding container ada8a350c56b5d467426cb6077d07bd81493980d5a04e6538c259fe082854c25: Status 404 returned error can't find the container with id 
ada8a350c56b5d467426cb6077d07bd81493980d5a04e6538c259fe082854c25 Nov 25 10:34:05 crc kubenswrapper[4702]: I1125 10:34:05.991420 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.009362 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.010109 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.510086213 +0000 UTC m=+143.876681902 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.045609 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.088318 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d5g4l"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.123615 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.123940 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.623927499 +0000 UTC m=+143.990523178 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.145527 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.150201 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" event={"ID":"1cae19df-e9ee-4a4c-b00e-814682583af1","Type":"ContainerStarted","Data":"611fd4c2e2dc720794a909476fb3d0cdb0356cd083231b2e9c595f388bc8cfb1"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.152251 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" event={"ID":"b22715b1-a39f-4f63-a05d-4f49ce20b654","Type":"ContainerStarted","Data":"f0eb09ec860ad79ea95645dfa9ba0676cbfd040756821cb2284c773be44123ca"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.153017 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" event={"ID":"fb2736bf-dd62-4b2a-982c-5aaa95671814","Type":"ContainerStarted","Data":"da503685cb2f9d60d250c2576524db16c88631a824d00b33f4c60b63c6df2b61"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.154473 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" event={"ID":"9402a523-ed8c-499b-bac9-0a0d6598ef52","Type":"ContainerStarted","Data":"67bc7acef838e847b451eb7a035523d0cac6e639a66286dcbcc54f0aecbd2427"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.155869 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-t7mgq" event={"ID":"d5393776-7502-4849-b157-6899da0bf181","Type":"ContainerStarted","Data":"42671744e5b6bfa495b780a54862d5c51b3f986d76d9adf0813c93e304cc81ab"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.155916 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-t7mgq" event={"ID":"d5393776-7502-4849-b157-6899da0bf181","Type":"ContainerStarted","Data":"eff2bd9b8792976299c09ff33fc419bb5b835f4eca38bbe2537a692d46b41dfb"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.159088 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" event={"ID":"8138f6f9-e4df-436f-9b58-b9b3f3e80b26","Type":"ContainerStarted","Data":"e62e937a59a134429ab4e659b93e4a3cf376a3076aeea4efa9b56ccae9fccd8e"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.160391 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zh82l" event={"ID":"9923db54-633b-4725-87f8-384fa9feac18","Type":"ContainerStarted","Data":"88982869c7619f8f2c2653204c2931b0cf469fcf234463521d0f32e748f8c858"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.162268 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" 
event={"ID":"8101d279-dc32-48e9-80c8-52ea60394ca3","Type":"ContainerStarted","Data":"135b668df87e5dc493470ed93b8948c050c20f69a732b53524b9a6553692d26a"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.163650 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" event={"ID":"de0d09a1-663f-4a61-a609-b74ee9eb887e","Type":"ContainerStarted","Data":"b5961cd8b3df8776bba8fddc0e4e30c0f4e568df6e4ba8862b6a83ac128e1059"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.173313 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" event={"ID":"b76bd594-1b9f-4aff-bde0-390b917fbf5e","Type":"ContainerStarted","Data":"99b90b0dbd2bfde601840d90e7dec567ab676550825bcfd378aea61f82ced3de"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.175503 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.177262 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" event={"ID":"eb58e8de-46f8-426c-a656-c8d4ad37950e","Type":"ContainerStarted","Data":"91938c6f184af948d7ed642b529c4cd7f0d02a6e811c79a7601de65c58520e58"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.178554 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" event={"ID":"6264f924-12b0-4afd-a7c3-ad6428ebfc79","Type":"ContainerStarted","Data":"70d2f6217066baa7347da0b27db7cf5f572d596ecfcde0dcec339c4b1b4d34ab"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.178589 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" event={"ID":"6264f924-12b0-4afd-a7c3-ad6428ebfc79","Type":"ContainerStarted","Data":"9dcae2862b25a95464fe2491874ef73ab1496cdf284dbfcabce8c84f25fd9e05"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.186319 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" event={"ID":"1683b7f6-ecde-4865-ace5-0d570070451d","Type":"ContainerStarted","Data":"952482987a4dea198ee034aa6abb8f8f5bcb036bf11b0f99d5eed1bf2752e9f5"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.201965 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" event={"ID":"d8ad397b-3500-4791-b005-c6cc5d83e8b1","Type":"ContainerStarted","Data":"2d8837193ed3a45cc6fbb9870fb1aa957c9f6083c07551d1fe236bcfec492bd4"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.202866 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" event={"ID":"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4","Type":"ContainerStarted","Data":"eba1d22457cfffb0df6dd71c5999850ed9dbfa3b471e0951099657e1feaa5392"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.211765 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" 
event={"ID":"266ce950-00f4-440d-9196-6a4ab41404ea","Type":"ContainerStarted","Data":"56e050f7cde6ec02649b6ccfd6c224544da6b647abf5750b6adf8e9c0461eba6"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.217319 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" event={"ID":"097c4dd5-c85f-447c-9448-5969f491f49d","Type":"ContainerStarted","Data":"b3ce343d2e9f2abfed1c23a01281fcdfe0ab0430759138ec3af14973da56a667"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.222394 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" event={"ID":"4468bda1-5b59-48d8-836b-87faa7f35726","Type":"ContainerStarted","Data":"889eeb0b640f0e18da4ab6c442dbfd2d1b5237ec9d26eaca14ace1cd340839a1"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.224596 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.224760 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.724739303 +0000 UTC m=+144.091335012 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.224883 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" event={"ID":"0fcf11f0-0a26-4ddd-a603-70ace7390469","Type":"ContainerStarted","Data":"f2c88e4d417dda25af14ec62adcb82f25c9a6ff0e567385ee880b455fc8baa56"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.225024 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.225433 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.725421105 +0000 UTC m=+144.092016794 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.225622 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" event={"ID":"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4","Type":"ContainerStarted","Data":"121a0e88a82585a597a8f2ac2a1eb163677e0c9cfeef61fce8ae957aad586d34"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.226468 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" event={"ID":"5d45eacc-c698-4f1e-bccd-05d88696c983","Type":"ContainerStarted","Data":"3dda6ed55824558e570c1cbfb6fa36e8d6857c1d236b073c9fb9e26741e07acc"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.227756 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gn6ks" event={"ID":"cf4f2a23-8387-4837-8635-0e76a2d340a4","Type":"ContainerStarted","Data":"cad059d6da7bdef0988a7774fb8f33d127241b7eaaa82bbe4422e39472bc4d9b"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.229741 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" event={"ID":"af02ba00-7c73-4bc3-a341-5dac59a49e12","Type":"ContainerStarted","Data":"a6b5ada40d9480a4a70e11fba7df782847f9ad2fd6d7e1c67d35f919672a3e36"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.231326 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" event={"ID":"1c0109c0-09bf-407d-b336-e3ff9f6ecea6","Type":"ContainerStarted","Data":"06e09e0e542dbe1c7ab1420088b266720fe7a979cf127c9ddfd70d7b6cc2c859"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.232719 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" event={"ID":"e22e5523-d9e6-4257-bd76-b216c4bee1be","Type":"ContainerStarted","Data":"eeb737073517eb8f011a662c4fce9f88a45c7e9e28e2a566b7a4593c27ca45ae"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.233888 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2r4cg" event={"ID":"a8621fa2-6cb1-4e0e-b1ed-3f254430262b","Type":"ContainerStarted","Data":"91f13a19d21404f5e507e08116dc83e2ce67255492c9c90958bee7a718fae12a"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.233941 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2r4cg" event={"ID":"a8621fa2-6cb1-4e0e-b1ed-3f254430262b","Type":"ContainerStarted","Data":"a523c22ebe4e0be731613778ba0cb19680be4a704926569fb6cae4c730e8b01f"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.235103 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" event={"ID":"b96ae9ba-e486-417d-9d4b-f3a7ad987de3","Type":"ContainerStarted","Data":"ada8a350c56b5d467426cb6077d07bd81493980d5a04e6538c259fe082854c25"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.236234 4702 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" event={"ID":"7835a751-7b09-4e97-94a6-1f920dc0fc15","Type":"ContainerStarted","Data":"66d1ff9c736eed2497cafd60a8c6f8392df69305dbbfdbf45f16c740e749b76b"} Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.269946 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-6qwtj"] Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.325972 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.82595542 +0000 UTC m=+144.192551109 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.326043 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.328417 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.329434 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.829383386 +0000 UTC m=+144.195979155 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: W1125 10:34:06.355077 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod66d2db93_b876_4750_86ff_5887f35a7f84.slice/crio-79eeb63dfb62fa0dbda59974d8b18d3673c97ac93c025dbea4c4798ebac1c9b8 WatchSource:0}: Error finding container 79eeb63dfb62fa0dbda59974d8b18d3673c97ac93c025dbea4c4798ebac1c9b8: Status 404 returned error can't find the container with id 79eeb63dfb62fa0dbda59974d8b18d3673c97ac93c025dbea4c4798ebac1c9b8 Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.429590 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.430003 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:06.929987124 +0000 UTC m=+144.296582813 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.479490 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp"] Nov 25 10:34:06 crc kubenswrapper[4702]: W1125 10:34:06.519562 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb47e3ff7_ea18_461e_ae57_fdbeb817aa45.slice/crio-b45a691ae4a656cfb9f08b0996f57b4ae5c49bbb7ccda248d7705f608f8b64a0 WatchSource:0}: Error finding container b45a691ae4a656cfb9f08b0996f57b4ae5c49bbb7ccda248d7705f608f8b64a0: Status 404 returned error can't find the container with id b45a691ae4a656cfb9f08b0996f57b4ae5c49bbb7ccda248d7705f608f8b64a0 Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.530775 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.531131 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.031116368 +0000 UTC m=+144.397712147 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.566128 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.622802 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-s8rd8"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.627014 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.633500 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.634226 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.134197472 +0000 UTC m=+144.500793161 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.636198 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-8kd6d"] Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.641212 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-74xrt"] Nov 25 10:34:06 crc kubenswrapper[4702]: W1125 10:34:06.715089 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod853aac53_23c5_4f78_a291_bc82dff9e338.slice/crio-be0c059d587bde9979dd4c8b9bcbb0d7d0e9fb2b46040818d8d476840a5a27a5 WatchSource:0}: Error finding container be0c059d587bde9979dd4c8b9bcbb0d7d0e9fb2b46040818d8d476840a5a27a5: Status 404 returned error can't find the container with id be0c059d587bde9979dd4c8b9bcbb0d7d0e9fb2b46040818d8d476840a5a27a5 Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.734883 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.735197 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.235178091 +0000 UTC m=+144.601773780 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: W1125 10:34:06.751810 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd2bb7ed5_d0b7_4157_a889_5331ba873fde.slice/crio-23cafce034c0b268eaa5a384ad5252347f43dc712fc00ed518021d20ad8a7f98 WatchSource:0}: Error finding container 23cafce034c0b268eaa5a384ad5252347f43dc712fc00ed518021d20ad8a7f98: Status 404 returned error can't find the container with id 23cafce034c0b268eaa5a384ad5252347f43dc712fc00ed518021d20ad8a7f98 Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.836286 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.836789 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.33677098 +0000 UTC m=+144.703366659 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:06 crc kubenswrapper[4702]: I1125 10:34:06.938545 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:06 crc kubenswrapper[4702]: E1125 10:34:06.938928 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.438913605 +0000 UTC m=+144.805509294 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.040931 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.041441 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.541398791 +0000 UTC m=+144.907994480 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.041777 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.042302 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.542285518 +0000 UTC m=+144.908881207 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.054276 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-b2ph2" podStartSLOduration=124.054249198 podStartE2EDuration="2m4.054249198s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:07.054163195 +0000 UTC m=+144.420758884" watchObservedRunningTime="2025-11-25 10:34:07.054249198 +0000 UTC m=+144.420844897" Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.098691 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" podStartSLOduration=124.09866097 podStartE2EDuration="2m4.09866097s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:07.091997494 +0000 UTC m=+144.458593183" watchObservedRunningTime="2025-11-25 10:34:07.09866097 +0000 UTC m=+144.465256659" Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.142886 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.144559 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.643856816 +0000 UTC m=+145.010452495 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.144697 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.145134 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.645117265 +0000 UTC m=+145.011712954 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.245427 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.245923 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.745884788 +0000 UTC m=+145.112480487 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.247858 4702 generic.go:334] "Generic (PLEG): container finished" podID="de0d09a1-663f-4a61-a609-b74ee9eb887e" containerID="b5961cd8b3df8776bba8fddc0e4e30c0f4e568df6e4ba8862b6a83ac128e1059" exitCode=0 Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.247963 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" event={"ID":"de0d09a1-663f-4a61-a609-b74ee9eb887e","Type":"ContainerDied","Data":"b5961cd8b3df8776bba8fddc0e4e30c0f4e568df6e4ba8862b6a83ac128e1059"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.250362 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-zh82l" event={"ID":"9923db54-633b-4725-87f8-384fa9feac18","Type":"ContainerStarted","Data":"bf5a163677b24748ce56df9a72e8b5e672aca7e5849dcc6c1bcdada847321eac"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.251624 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" event={"ID":"853aac53-23c5-4f78-a291-bc82dff9e338","Type":"ContainerStarted","Data":"be0c059d587bde9979dd4c8b9bcbb0d7d0e9fb2b46040818d8d476840a5a27a5"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.255275 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" event={"ID":"6c924c09-c172-4f11-91a5-86cb5949e5cd","Type":"ContainerStarted","Data":"ab4fe0d763e4fd6964f40957d398a425f3ae0184a4a46ab670f8733004d2c9c1"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.256770 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-74xrt" event={"ID":"26c8117b-b4b8-4563-980c-150a35aaf727","Type":"ContainerStarted","Data":"3d6c530b457351a187616ca54f43b4aab83f7088d1c87d3e1ed1d40bf62c61d0"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.259762 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" event={"ID":"b76bd594-1b9f-4aff-bde0-390b917fbf5e","Type":"ContainerStarted","Data":"9d2a97f2f47f14fe7925dce5058ddcce553300517753cc14f3d35b692e960cd5"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.260755 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" event={"ID":"e3932611-ca66-44e6-bcd8-5d40328453b4","Type":"ContainerStarted","Data":"dca73b57035eeb28ac28a6bfc36428d5748c0d156814bee0e7b646916c9921e4"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.262088 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" event={"ID":"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4","Type":"ContainerStarted","Data":"b7486c174b95c9e6c9777c2d4fee9b92f1c86ad63984ef00a248862f5ccc5ed0"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.267004 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" event={"ID":"1c0109c0-09bf-407d-b336-e3ff9f6ecea6","Type":"ContainerStarted","Data":"51dc47ae7cc34996fda7f7715106eaba3f1f8400a05cf2d3c7b3ef2fa58daacc"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.268381 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" event={"ID":"b47e3ff7-ea18-461e-ae57-fdbeb817aa45","Type":"ContainerStarted","Data":"b45a691ae4a656cfb9f08b0996f57b4ae5c49bbb7ccda248d7705f608f8b64a0"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.270636 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-6qwtj" event={"ID":"66d2db93-b876-4750-86ff-5887f35a7f84","Type":"ContainerStarted","Data":"79eeb63dfb62fa0dbda59974d8b18d3673c97ac93c025dbea4c4798ebac1c9b8"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.271646 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" event={"ID":"f2831aac-4bc0-46ae-8a5b-b7966380ad2e","Type":"ContainerStarted","Data":"234086c45bda72b37ab4e1c7802fa39930267f5775f75db56b65b1c22a52fa81"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.273681 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" event={"ID":"8101d279-dc32-48e9-80c8-52ea60394ca3","Type":"ContainerStarted","Data":"3203098cfe7cd1ed6cf3a5e38a9d1ba133dcf99d5597f1ad083fa6f8c422cbe4"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.276085 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" event={"ID":"af02ba00-7c73-4bc3-a341-5dac59a49e12","Type":"ContainerStarted","Data":"c8d45a1b7421e6b612391c83672dec720b7e1eb89da064489bd195d8d74fce7a"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.279033 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" event={"ID":"266ce950-00f4-440d-9196-6a4ab41404ea","Type":"ContainerStarted","Data":"85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.281590 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" event={"ID":"32c118a6-a92d-47fb-8169-bccbb5e51072","Type":"ContainerStarted","Data":"15a09b708aefb80ad1e2ea642e68d282d10df1d14a4242914f2ffb28014b36db"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.282927 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-s8rd8" event={"ID":"d2bb7ed5-d0b7-4157-a889-5331ba873fde","Type":"ContainerStarted","Data":"23cafce034c0b268eaa5a384ad5252347f43dc712fc00ed518021d20ad8a7f98"} Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.289998 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.291049 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.291103 4702 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.347313 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.348049 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.848036592 +0000 UTC m=+145.214632281 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.448918 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.449018 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:07.948997651 +0000 UTC m=+145.315593340 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.449330 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.450602 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. 
No retries permitted until 2025-11-25 10:34:07.95059131 +0000 UTC m=+145.317187009 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.549972 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.550245 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.050212068 +0000 UTC m=+145.416807777 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.550348 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.550664 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.050653881 +0000 UTC m=+145.417249570 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.589022 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-t7mgq" podStartSLOduration=124.589002816 podStartE2EDuration="2m4.589002816s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:07.588465919 +0000 UTC m=+144.955061608" watchObservedRunningTime="2025-11-25 10:34:07.589002816 +0000 UTC m=+144.955598515" Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.630475 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-6fn9w" podStartSLOduration=124.630457096 podStartE2EDuration="2m4.630457096s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:07.629721174 +0000 UTC m=+144.996316883" watchObservedRunningTime="2025-11-25 10:34:07.630457096 +0000 UTC m=+144.997052775" Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.652034 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.652149 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.152123116 +0000 UTC m=+145.518718815 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.652459 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.652777 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.152767486 +0000 UTC m=+145.519363175 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.672947 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-2r4cg" podStartSLOduration=124.672927178 podStartE2EDuration="2m4.672927178s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:07.669370519 +0000 UTC m=+145.035966208" watchObservedRunningTime="2025-11-25 10:34:07.672927178 +0000 UTC m=+145.039522867" Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.710260 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-5h6hm" podStartSLOduration=124.710245081 podStartE2EDuration="2m4.710245081s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:07.709016843 +0000 UTC m=+145.075612542" watchObservedRunningTime="2025-11-25 10:34:07.710245081 +0000 UTC m=+145.076840770" Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.753504 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.753656 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 
podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.253633301 +0000 UTC m=+145.620229000 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.753717 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.754095 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.254081445 +0000 UTC m=+145.620677134 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.854835 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.855218 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.355190869 +0000 UTC m=+145.721786558 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:07 crc kubenswrapper[4702]: I1125 10:34:07.956339 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:07 crc kubenswrapper[4702]: E1125 10:34:07.956764 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.456742526 +0000 UTC m=+145.823338295 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.059696 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.059832 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.559801809 +0000 UTC m=+145.926397518 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.060568 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.060860 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.560852402 +0000 UTC m=+145.927448091 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.161686 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.161847 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.661819521 +0000 UTC m=+146.028415200 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.161955 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.162214 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.662206423 +0000 UTC m=+146.028802112 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.211092 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.213002 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.213081 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.262654 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.262828 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.762761079 +0000 UTC m=+146.129356798 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.262966 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.263344 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.763300056 +0000 UTC m=+146.129895755 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.289149 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" event={"ID":"097c4dd5-c85f-447c-9448-5969f491f49d","Type":"ContainerStarted","Data":"eceb61b3707f232438e52a2c7c3f4bda7d170422cdb0862b1e0228b877aee1fb"} Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.292294 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" event={"ID":"4468bda1-5b59-48d8-836b-87faa7f35726","Type":"ContainerStarted","Data":"90f6e04f1b929210c514a847d5eaa68c71c7cf60ff84cf9b21d98258b2658eb1"} Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.293625 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" event={"ID":"c3d9f60f-f4a5-48c5-a80f-88fb44cff0e4","Type":"ContainerStarted","Data":"f8c6309ae19677dba18eb23dc0cc610e7df7ab5120febeaaf7a2dc9d15dd09c2"} Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.294914 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" event={"ID":"0fcf11f0-0a26-4ddd-a603-70ace7390469","Type":"ContainerStarted","Data":"f954eae1f7aaa35810eafba5e164bdaa7451a43359178569ed6c3c2e4b9f1e4e"} Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.296406 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" event={"ID":"8138f6f9-e4df-436f-9b58-b9b3f3e80b26","Type":"ContainerStarted","Data":"a8ae41b6b7c7413f6085a5ba0a1c789af3059eeb88ac00a0bae0cea5621d9d0f"} Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.297766 4702 kubelet.go:2453] 
"SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" event={"ID":"5d45eacc-c698-4f1e-bccd-05d88696c983","Type":"ContainerStarted","Data":"074554f69af7b0c5528909d3802253e03058919ca825a2b7c741356de5cb1efd"} Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.299190 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" event={"ID":"d8ad397b-3500-4791-b005-c6cc5d83e8b1","Type":"ContainerStarted","Data":"0dc837e54e599c64c339081e7fed16b5bf9383569d305be484dc6404831afb93"} Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.300411 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" event={"ID":"1cae19df-e9ee-4a4c-b00e-814682583af1","Type":"ContainerStarted","Data":"c89a640e868089314a31322277707200412c2a4406f47d7c7e613d9b2cfdb668"} Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.301210 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.301254 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.317719 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" podStartSLOduration=125.317700766 podStartE2EDuration="2m5.317700766s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:08.317682086 +0000 UTC m=+145.684277805" watchObservedRunningTime="2025-11-25 10:34:08.317700766 +0000 UTC m=+145.684296455" Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.340131 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-k86kg" podStartSLOduration=125.340107988 podStartE2EDuration="2m5.340107988s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:08.334694711 +0000 UTC m=+145.701290440" watchObservedRunningTime="2025-11-25 10:34:08.340107988 +0000 UTC m=+145.706703677" Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.363976 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.364209 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-25 10:34:08.864176002 +0000 UTC m=+146.230771691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.364498 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.364867 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.864857913 +0000 UTC m=+146.231453602 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.465376 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.465594 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.965559304 +0000 UTC m=+146.332154993 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.466007 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.466662 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:08.966654428 +0000 UTC m=+146.333250117 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.567469 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.567836 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.067810162 +0000 UTC m=+146.434405851 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.669243 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.669677 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.169657408 +0000 UTC m=+146.536253097 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.770372 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.770791 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.270756581 +0000 UTC m=+146.637352270 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.770868 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.771180 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.271165924 +0000 UTC m=+146.637761613 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.872298 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.872469 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.372439033 +0000 UTC m=+146.739034722 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.872663 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.873086 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.373070242 +0000 UTC m=+146.739665921 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.974286 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.974473 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.474447954 +0000 UTC m=+146.841043643 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:08 crc kubenswrapper[4702]: I1125 10:34:08.974521 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:08 crc kubenswrapper[4702]: E1125 10:34:08.975019 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.475012191 +0000 UTC m=+146.841607880 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.083919 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.084636 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.584616727 +0000 UTC m=+146.951212416 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.186634 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.187301 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.687276508 +0000 UTC m=+147.053872227 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.255106 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body= Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.255385 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.288428 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.288594 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.788562257 +0000 UTC m=+147.155157956 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.288864 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.289314 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.78930519 +0000 UTC m=+147.155900879 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.306659 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" event={"ID":"fb2736bf-dd62-4b2a-982c-5aaa95671814","Type":"ContainerStarted","Data":"9e02364d4fa35d022d9a264130e53303e67dea6c5aae926e4e02bd67a7e30546"} Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.309874 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" event={"ID":"f2831aac-4bc0-46ae-8a5b-b7966380ad2e","Type":"ContainerStarted","Data":"68bf7bfba4278a314ae3385b873e565c052f1b94c4144230d18e9028b4c75aab"} Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.328211 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-7s7k2" podStartSLOduration=126.328177731 podStartE2EDuration="2m6.328177731s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:09.325746556 +0000 UTC m=+146.692342325" watchObservedRunningTime="2025-11-25 10:34:09.328177731 +0000 UTC m=+146.694773460" Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.390961 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.391500 4702 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.891469906 +0000 UTC m=+147.258065595 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.493093 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.493672 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:09.993653493 +0000 UTC m=+147.360249182 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.598517 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.599521 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.0994298 +0000 UTC m=+147.466025489 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.600466 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.600816 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.100808123 +0000 UTC m=+147.467403812 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.701135 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.701292 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.201267856 +0000 UTC m=+147.567863545 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.701820 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.702200 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.202186314 +0000 UTC m=+147.568782003 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.803257 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.803454 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.303422262 +0000 UTC m=+147.670017951 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.803540 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.803868 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.303853375 +0000 UTC m=+147.670449064 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:09 crc kubenswrapper[4702]: I1125 10:34:09.905038 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:09 crc kubenswrapper[4702]: E1125 10:34:09.905526 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.405506945 +0000 UTC m=+147.772102634 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.007006 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.007441 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.507418274 +0000 UTC m=+147.874014033 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.108391 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.108732 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.608714973 +0000 UTC m=+147.975310662 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.210151 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.210513 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.710494957 +0000 UTC m=+148.077090646 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.217621 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:10 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:10 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:10 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.217704 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.311517 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.311698 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.811659232 +0000 UTC m=+148.178254921 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.311847 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.312230 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.812213679 +0000 UTC m=+148.178809368 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.321137 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" event={"ID":"b96ae9ba-e486-417d-9d4b-f3a7ad987de3","Type":"ContainerStarted","Data":"d7174d2975b9925e4dfbb9ab32495b81dedeefe5c4f08b254cade96afa971b1a"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.326583 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" event={"ID":"de0d09a1-663f-4a61-a609-b74ee9eb887e","Type":"ContainerStarted","Data":"f35c05608b7ef0c9ac2aaae93c4140ab29a0e732a708d12cea439c0df87b91cb"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.326744 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.342035 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" event={"ID":"8e260cd3-afc7-4f82-90de-e3cd459cc79a","Type":"ContainerStarted","Data":"8d4fbd737541febbc3a2d487fff2561b2349e17acd498de74d337a3532eb58dc"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.348506 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-6ntqc" podStartSLOduration=127.348485729 podStartE2EDuration="2m7.348485729s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.343554047 +0000 UTC m=+147.710149736" watchObservedRunningTime="2025-11-25 10:34:10.348485729 +0000 UTC m=+147.715081418" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 
10:34:10.358448 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" event={"ID":"9402a523-ed8c-499b-bac9-0a0d6598ef52","Type":"ContainerStarted","Data":"d0ab832468c56371451f4a48201768e9ea178e7903a3fe19c3ada00817782f46"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.358876 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.359962 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" event={"ID":"e3932611-ca66-44e6-bcd8-5d40328453b4","Type":"ContainerStarted","Data":"0cf989401e9a47d9607eb7d17df2b977deebf76b90046be6e0dee68a6dafb32c"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.361870 4702 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-n4r8n container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.361932 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.363550 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-s8rd8" event={"ID":"d2bb7ed5-d0b7-4157-a889-5331ba873fde","Type":"ContainerStarted","Data":"a3997f97382f36c100a181f981311163defefbd1dfe7053597cf7de4f8adf66e"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.365371 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" event={"ID":"e22e5523-d9e6-4257-bd76-b216c4bee1be","Type":"ContainerStarted","Data":"1dca76e51b4e63136c0843f805f41bf1b2d51d0f79642cfdb03eaddd508a3807"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.366210 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.370337 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" event={"ID":"7835a751-7b09-4e97-94a6-1f920dc0fc15","Type":"ContainerStarted","Data":"38496dd4b4c24bfac43671a73b040099d4629430612990547a20c87bb6683926"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.375296 4702 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d5g4l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.375358 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 
10.217.0.23:8080: connect: connection refused" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.379090 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" podStartSLOduration=127.379075814 podStartE2EDuration="2m7.379075814s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.374467102 +0000 UTC m=+147.741062791" watchObservedRunningTime="2025-11-25 10:34:10.379075814 +0000 UTC m=+147.745671503" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.381148 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-6qwtj" event={"ID":"66d2db93-b876-4750-86ff-5887f35a7f84","Type":"ContainerStarted","Data":"94a40147bb688671f852c7b3ee8da42d1b9109fd235d43e55164fd3bb54da6b4"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.390101 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-gn6ks" event={"ID":"cf4f2a23-8387-4837-8635-0e76a2d340a4","Type":"ContainerStarted","Data":"dc9996439688a54be27842f78d56cd797b257f996204bc52a952b17ca356cad4"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.404424 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" podStartSLOduration=127.404402097 podStartE2EDuration="2m7.404402097s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.398330689 +0000 UTC m=+147.764926398" watchObservedRunningTime="2025-11-25 10:34:10.404402097 +0000 UTC m=+147.770997786" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.406078 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" event={"ID":"6c924c09-c172-4f11-91a5-86cb5949e5cd","Type":"ContainerStarted","Data":"3b05cc13d6b2417297e3f1bc69c2213e69bb2386ce146fe0ab80e20aebb51e95"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.407093 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.410994 4702 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-t5vl4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.411037 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" podUID="6c924c09-c172-4f11-91a5-86cb5949e5cd" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.412648 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.413019 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:10.913002332 +0000 UTC m=+148.279598021 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.414317 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" event={"ID":"d70c50e1-8a6b-4f9b-8d03-79eff4b911d4","Type":"ContainerStarted","Data":"6f033faf4c21e9beb2687d42ae63baee4cebe4ace141e01a3760a1ecd617fa0f"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.418536 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" event={"ID":"eb58e8de-46f8-426c-a656-c8d4ad37950e","Type":"ContainerStarted","Data":"725c2bfbbb5f086181646193d781854d03d4d8447ddf4ab342a7a661bcda682c"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.423007 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-8kd6d" podStartSLOduration=127.422975391 podStartE2EDuration="2m7.422975391s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.420650889 +0000 UTC m=+147.787246578" watchObservedRunningTime="2025-11-25 10:34:10.422975391 +0000 UTC m=+147.789571080" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.453779 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" event={"ID":"853aac53-23c5-4f78-a291-bc82dff9e338","Type":"ContainerStarted","Data":"e039a38c82f8a68c96e7c289dccf4ffc5849572baa3813602ecb5f74281537e9"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.459763 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" event={"ID":"b22715b1-a39f-4f63-a05d-4f49ce20b654","Type":"ContainerStarted","Data":"de884903edae113d04c41345056aff9e24f557902de62f6f6f389a1ea6c2c744"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.470794 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" podStartSLOduration=127.470778117 podStartE2EDuration="2m7.470778117s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.446211478 +0000 UTC m=+147.812807167" watchObservedRunningTime="2025-11-25 10:34:10.470778117 +0000 UTC m=+147.837373806" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.470845 4702 kubelet.go:2453] "SyncLoop (PLEG): event 
for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" event={"ID":"32c118a6-a92d-47fb-8169-bccbb5e51072","Type":"ContainerStarted","Data":"a4b65bfbc71f8464a9d2f7a1942602a7efcba0937961f5aff9d9d41d3faa3436"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.472148 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-gn6ks" podStartSLOduration=8.472141499 podStartE2EDuration="8.472141499s" podCreationTimestamp="2025-11-25 10:34:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.469366344 +0000 UTC m=+147.835962033" watchObservedRunningTime="2025-11-25 10:34:10.472141499 +0000 UTC m=+147.838737188" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.472444 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" event={"ID":"1c0109c0-09bf-407d-b336-e3ff9f6ecea6","Type":"ContainerStarted","Data":"0396f5d97468e7a296aa0d74f6a73350302535735ca81a1d7638baa3a5f5454b"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.472767 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.480084 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" event={"ID":"b47e3ff7-ea18-461e-ae57-fdbeb817aa45","Type":"ContainerStarted","Data":"3822dd189b8e8fd69e2b6552e087bcb1abb6dfe5fc9964e8549425d7f32f1e50"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.485613 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" event={"ID":"738f53e3-118f-44e8-9b32-baeff939bef4","Type":"ContainerStarted","Data":"3448e8f27e65c1fd04130db9a11d247ff42829ac79a56778aeef7d2b1bef550d"} Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.485660 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.486332 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.488995 4702 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-5mp5k container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.489038 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" podUID="097c4dd5-c85f-447c-9448-5969f491f49d" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.489257 4702 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-995q8 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 
10.217.0.38:8443: connect: connection refused" start-of-body= Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.489279 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" podUID="5d45eacc-c698-4f1e-bccd-05d88696c983" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.493836 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-6qwtj" podStartSLOduration=8.493824499 podStartE2EDuration="8.493824499s" podCreationTimestamp="2025-11-25 10:34:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.493250612 +0000 UTC m=+147.859846301" watchObservedRunningTime="2025-11-25 10:34:10.493824499 +0000 UTC m=+147.860420188" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.515785 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.523702 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.023677742 +0000 UTC m=+148.390273421 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.537931 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" podStartSLOduration=127.537914351 podStartE2EDuration="2m7.537914351s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.516349435 +0000 UTC m=+147.882945124" watchObservedRunningTime="2025-11-25 10:34:10.537914351 +0000 UTC m=+147.904510040" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.550429 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-6x8kb" podStartSLOduration=127.550405747 podStartE2EDuration="2m7.550405747s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.537265511 +0000 UTC m=+147.903861200" watchObservedRunningTime="2025-11-25 10:34:10.550405747 +0000 UTC m=+147.917001446" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.620528 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.621784 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.121756641 +0000 UTC m=+148.488352370 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.635084 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-zh82l" podStartSLOduration=127.635067542 podStartE2EDuration="2m7.635067542s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.59938145 +0000 UTC m=+147.965977139" watchObservedRunningTime="2025-11-25 10:34:10.635067542 +0000 UTC m=+148.001663231" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.635461 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-fld29" podStartSLOduration=127.635456685 podStartE2EDuration="2m7.635456685s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.63239313 +0000 UTC m=+147.998988849" watchObservedRunningTime="2025-11-25 10:34:10.635456685 +0000 UTC m=+148.002052374" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.688028 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-cr5t9" podStartSLOduration=127.688012848 podStartE2EDuration="2m7.688012848s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.67546207 +0000 UTC m=+148.042057759" watchObservedRunningTime="2025-11-25 10:34:10.688012848 +0000 UTC m=+148.054608537" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.701967 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" podStartSLOduration=127.701949199 podStartE2EDuration="2m7.701949199s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.70135051 +0000 UTC m=+148.067946209" watchObservedRunningTime="2025-11-25 10:34:10.701949199 +0000 UTC m=+148.068544888" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.722137 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.722548 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: 
nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.222529314 +0000 UTC m=+148.589125053 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.739083 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-vnwhc" podStartSLOduration=127.739061335 podStartE2EDuration="2m7.739061335s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.737331232 +0000 UTC m=+148.103926931" watchObservedRunningTime="2025-11-25 10:34:10.739061335 +0000 UTC m=+148.105657024" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.762211 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" podStartSLOduration=127.762184539 podStartE2EDuration="2m7.762184539s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.758426423 +0000 UTC m=+148.125022132" watchObservedRunningTime="2025-11-25 10:34:10.762184539 +0000 UTC m=+148.128780228" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.781032 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-gg8sb" podStartSLOduration=127.781009921 podStartE2EDuration="2m7.781009921s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.77969087 +0000 UTC m=+148.146286569" watchObservedRunningTime="2025-11-25 10:34:10.781009921 +0000 UTC m=+148.147605620" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.799545 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-4jnl2" podStartSLOduration=127.799529063 podStartE2EDuration="2m7.799529063s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.798491791 +0000 UTC m=+148.165087490" watchObservedRunningTime="2025-11-25 10:34:10.799529063 +0000 UTC m=+148.166124752" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.821409 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" podStartSLOduration=127.821387888 podStartE2EDuration="2m7.821387888s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.817545709 +0000 UTC m=+148.184141398" 
watchObservedRunningTime="2025-11-25 10:34:10.821387888 +0000 UTC m=+148.187983577" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.823199 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.823583 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.323565315 +0000 UTC m=+148.690161004 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.840679 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-j84mt" podStartSLOduration=127.840661973 podStartE2EDuration="2m7.840661973s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.837689842 +0000 UTC m=+148.204285541" watchObservedRunningTime="2025-11-25 10:34:10.840661973 +0000 UTC m=+148.207257662" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.862811 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" podStartSLOduration=127.862791057 podStartE2EDuration="2m7.862791057s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.860369232 +0000 UTC m=+148.226964931" watchObservedRunningTime="2025-11-25 10:34:10.862791057 +0000 UTC m=+148.229386746" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.904462 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" podStartSLOduration=127.904443413 podStartE2EDuration="2m7.904443413s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.902051299 +0000 UTC m=+148.268647008" watchObservedRunningTime="2025-11-25 10:34:10.904443413 +0000 UTC m=+148.271039102" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.905289 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-qpctp" podStartSLOduration=127.905280989 podStartE2EDuration="2m7.905280989s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" 
lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.880107832 +0000 UTC m=+148.246703531" watchObservedRunningTime="2025-11-25 10:34:10.905280989 +0000 UTC m=+148.271876678" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.916651 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-sfbdd" podStartSLOduration=127.916633959 podStartE2EDuration="2m7.916633959s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.91569969 +0000 UTC m=+148.282295379" watchObservedRunningTime="2025-11-25 10:34:10.916633959 +0000 UTC m=+148.283229648" Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.925303 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:10 crc kubenswrapper[4702]: E1125 10:34:10.925719 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.425700029 +0000 UTC m=+148.792295788 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:10 crc kubenswrapper[4702]: I1125 10:34:10.935707 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-n7wzw" podStartSLOduration=127.935688358 podStartE2EDuration="2m7.935688358s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:10.934980346 +0000 UTC m=+148.301576035" watchObservedRunningTime="2025-11-25 10:34:10.935688358 +0000 UTC m=+148.302284047" Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.026251 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.026416 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.52639363 +0000 UTC m=+148.892989319 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.026463 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.026784 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.526776122 +0000 UTC m=+148.893371811 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.127400 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.127570 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.627536314 +0000 UTC m=+148.994132013 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.127704 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.128051 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.62803683 +0000 UTC m=+148.994632579 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.214160 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:11 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:11 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:11 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.214213 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.228716 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.228928 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.728884915 +0000 UTC m=+149.095480604 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.229058 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.229380 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.72936844 +0000 UTC m=+149.095964129 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.330018 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.330025 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.830007309 +0000 UTC m=+149.196602998 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.331022 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.331333 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.83132557 +0000 UTC m=+149.197921259 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.432493 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.432637 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.932609728 +0000 UTC m=+149.299205417 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.432780 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.433079 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:11.933067212 +0000 UTC m=+149.299662901 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.491230 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" event={"ID":"0fcf11f0-0a26-4ddd-a603-70ace7390469","Type":"ContainerStarted","Data":"0f44b8c934743f31d05311c91d1a2ed6a48e694458f8047e550be1f75b344d3f"} Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.493498 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" event={"ID":"8138f6f9-e4df-436f-9b58-b9b3f3e80b26","Type":"ContainerStarted","Data":"4499d1a806e49eff6969e52ef925c54ba94d1517c862d2086f943cdc3ad3327a"} Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.495248 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" event={"ID":"fb2736bf-dd62-4b2a-982c-5aaa95671814","Type":"ContainerStarted","Data":"63dcfc74af5a8e23a5090ddcb4ececcd1363777bb1f8d574cabffc87c054f552"} Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.497101 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" event={"ID":"eb58e8de-46f8-426c-a656-c8d4ad37950e","Type":"ContainerStarted","Data":"80e472ab1fc0b0ddf963aaadbc191d52f98093fd03898d1f3ab57cab9fcd30e9"} Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.498701 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" event={"ID":"f2831aac-4bc0-46ae-8a5b-b7966380ad2e","Type":"ContainerStarted","Data":"64d72d31ebf810374322034d77ae546da286a0d467659e17e2ce0bd58a8b066f"} Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.499788 4702 patch_prober.go:28] interesting 
pod/catalog-operator-68c6474976-995q8 container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" start-of-body= Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.499810 4702 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-5mp5k container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" start-of-body= Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.499824 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8" podUID="5d45eacc-c698-4f1e-bccd-05d88696c983" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.38:8443/healthz\": dial tcp 10.217.0.38:8443: connect: connection refused" Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.499794 4702 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-n4r8n container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.499862 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.499861 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k" podUID="097c4dd5-c85f-447c-9448-5969f491f49d" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.31:8443/healthz\": dial tcp 10.217.0.31:8443: connect: connection refused" Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.499855 4702 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d5g4l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.499794 4702 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-t5vl4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.500636 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" podUID="6c924c09-c172-4f11-91a5-86cb5949e5cd" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.500598 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" 
podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.533980 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.534166 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.034138175 +0000 UTC m=+149.400733874 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.534344 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.534607 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.034596609 +0000 UTC m=+149.401192298 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.635294 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.635493 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.135460715 +0000 UTC m=+149.502056414 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.635682 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.636078 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.136066054 +0000 UTC m=+149.502661743 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.742037 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.742241 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.242210832 +0000 UTC m=+149.608806521 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.742543 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.742865 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.242851042 +0000 UTC m=+149.609446731 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.843986 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.844171 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.344141881 +0000 UTC m=+149.710737570 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.844404 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.844725 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.344712749 +0000 UTC m=+149.711308438 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.945406 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.945604 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.445575615 +0000 UTC m=+149.812171314 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:11 crc kubenswrapper[4702]: I1125 10:34:11.945750 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:11 crc kubenswrapper[4702]: E1125 10:34:11.946085 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.44607431 +0000 UTC m=+149.812670059 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.047158 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.047383 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.547354069 +0000 UTC m=+149.913949758 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.047494 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.047893 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.547873235 +0000 UTC m=+149.914468924 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.148630 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.148783 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.648762871 +0000 UTC m=+150.015358560 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.149012 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.149326 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.649316368 +0000 UTC m=+150.015912057 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.212889 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:12 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:12 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:12 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.213047 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.250182 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.250279 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.750257576 +0000 UTC m=+150.116853265 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.250405 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.250452 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.250525 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.250570 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.250597 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.250854 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.750846135 +0000 UTC m=+150.117441824 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.255720 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.255881 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.255925 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.328267 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.351711 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.352054 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.852037871 +0000 UTC m=+150.218633560 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.417613 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.426111 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.434728 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.453896 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.454304 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:12.954287589 +0000 UTC m=+150.320883278 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.517164 4702 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-n4r8n container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body= Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.517207 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.517514 4702 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d5g4l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body= Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.517543 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.518752 4702 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-t5vl4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe 
status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body= Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.518787 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" podUID="6c924c09-c172-4f11-91a5-86cb5949e5cd" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.554564 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.555011 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.0549922 +0000 UTC m=+150.421587889 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.657077 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.657371 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.157357872 +0000 UTC m=+150.523953561 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.758614 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.759107 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.259089265 +0000 UTC m=+150.625684954 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.859792 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.860719 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.360705824 +0000 UTC m=+150.727301513 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:12 crc kubenswrapper[4702]: I1125 10:34:12.961071 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:12 crc kubenswrapper[4702]: E1125 10:34:12.961331 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.461315962 +0000 UTC m=+150.827911651 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.062489 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.062847 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.562831968 +0000 UTC m=+150.929427657 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.164108 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.164585 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.66456553 +0000 UTC m=+151.031161219 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.222474 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:13 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:13 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:13 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.222547 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.265255 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.265616 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.765586951 +0000 UTC m=+151.132182630 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.366662 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.366841 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.866813488 +0000 UTC m=+151.233409177 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.367167 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.367491 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.867477348 +0000 UTC m=+151.234073037 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.468284 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.468442 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.968418727 +0000 UTC m=+151.335014416 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.468537 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.468792 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:13.968782048 +0000 UTC m=+151.335377737 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.521831 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-s8rd8" event={"ID":"d2bb7ed5-d0b7-4157-a889-5331ba873fde","Type":"ContainerStarted","Data":"07d6cb4d2389de6c3b2f977c608bf413f688dde9e6ef729b1fccfa929a191763"}
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.523010 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"855ad8a97c035dd521dccd217fd7ef272620aa301afaa748698134d1bdb94486"}
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.524085 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"84e3b3341ec092e734df4b08b1350deab81edc803880a87cba63af813fd759d7"}
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.526260 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" event={"ID":"738f53e3-118f-44e8-9b32-baeff939bef4","Type":"ContainerStarted","Data":"ac9163ef5c6a525122beb688d72629ecb51f84c1ab6cc6747316d476d2253055"}
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.527527 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"daee945bd9ae50c861f9f958ff149fdfefd71a6ddcc1bc17204c966e12c960d5"}
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.553137 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-45qgf" podStartSLOduration=130.553120053 podStartE2EDuration="2m10.553120053s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:13.550784211 +0000 UTC m=+150.917379900" watchObservedRunningTime="2025-11-25 10:34:13.553120053 +0000 UTC m=+150.919715742"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.569212 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.569394 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.069366575 +0000 UTC m=+151.435962274 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.569461 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.570024 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.070015585 +0000 UTC m=+151.436611274 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.578041 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-5klvj" podStartSLOduration=130.578027193 podStartE2EDuration="2m10.578027193s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:13.576527516 +0000 UTC m=+150.943123235" watchObservedRunningTime="2025-11-25 10:34:13.578027193 +0000 UTC m=+150.944622882"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.590498 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.590556 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.670689 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.670921 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.170874991 +0000 UTC m=+151.537470680 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.671031 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.671338 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.171309434 +0000 UTC m=+151.537905123 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.677173 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.677995 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.691521 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.692196 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.692791 4702 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-cf8hl container/openshift-config-operator namespace/openshift-config-operator: Liveness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.692824 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" podUID="de0d09a1-663f-4a61-a609-b74ee9eb887e" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.692838 4702 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-cf8hl container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body=
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.692890 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" podUID="de0d09a1-663f-4a61-a609-b74ee9eb887e" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.693081 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.774634 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.774820 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.274799211 +0000 UTC m=+151.641394900 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.774951 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.775320 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.275308857 +0000 UTC m=+151.641904546 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.875746 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.876055 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.876093 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.876547 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.376528064 +0000 UTC m=+151.743123743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.977100 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.977157 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.977173 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:13 crc kubenswrapper[4702]: E1125 10:34:13.977670 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.477658728 +0000 UTC m=+151.844254417 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:13 crc kubenswrapper[4702]: I1125 10:34:13.977807 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.005435 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.077775 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.078217 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.578199353 +0000 UTC m=+151.944795042 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.078333 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.078642 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.578631257 +0000 UTC m=+151.945226946 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.178944 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.179107 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.67908154 +0000 UTC m=+152.045677229 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.179509 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.179917 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.679878964 +0000 UTC m=+152.046474653 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.195771 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.195822 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.197874 4702 patch_prober.go:28] interesting pod/apiserver-7bbb656c7d-btsrc container/oauth-apiserver namespace/openshift-oauth-apiserver: Startup probe status=failure output="Get \"https://10.217.0.22:8443/livez\": dial tcp 10.217.0.22:8443: connect: connection refused" start-of-body=
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.197968 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc" podUID="8e260cd3-afc7-4f82-90de-e3cd459cc79a" containerName="oauth-apiserver" probeResult="failure" output="Get \"https://10.217.0.22:8443/livez\": dial tcp 10.217.0.22:8443: connect: connection refused"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.214373 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 25 10:34:14 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld
Nov 25 10:34:14 crc kubenswrapper[4702]: [+]process-running ok
Nov 25 10:34:14 crc kubenswrapper[4702]: healthz check failed
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.214429 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.225952 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-b2ph2"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.244312 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-b2ph2"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.258876 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.263496 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.280261 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.281496 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.781473883 +0000 UTC m=+152.148069572 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.291829 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.381801 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.382143 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.882127202 +0000 UTC m=+152.248722891 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.482630 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.482844 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.982820292 +0000 UTC m=+152.349415981 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.483180 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.483517 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:14.983508753 +0000 UTC m=+152.350104442 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.532476 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"0f75ecab4b96071e9a99567cc42e69ccfdbedd38fac32cbee6ddd83eb32ac467"}
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.532559 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.533801 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"dfa9d816a040852041d713676a1f687ebb1376906602b7853bbe22f9e824e13d"}
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.534951 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"a43977f141d91df72273eab08263d8fad1c65d0660ec7b852d6b4845bb413a1b"}
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.558845 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-s8rd8"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.584493 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.584929 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.084889624 +0000 UTC m=+152.451485323 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.656280 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-qfr6v" podStartSLOduration=131.656263319 podStartE2EDuration="2m11.656263319s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:14.650799521 +0000 UTC m=+152.017395210" watchObservedRunningTime="2025-11-25 10:34:14.656263319 +0000 UTC m=+152.022859008"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.688712 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.691660 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.191639732 +0000 UTC m=+152.558235501 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.701483 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body=
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.701569 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.702003 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body=
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.702028 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.708908 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.717421 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" podStartSLOduration=131.717405008 podStartE2EDuration="2m11.717405008s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:14.716020275 +0000 UTC m=+152.082615974" watchObservedRunningTime="2025-11-25 10:34:14.717405008 +0000 UTC m=+152.084000697"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.730427 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.790572 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.790940 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.290918409 +0000 UTC m=+152.657514108 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.816261 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-7n7vp" podStartSLOduration=131.816241011 podStartE2EDuration="2m11.816241011s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:14.808274255 +0000 UTC m=+152.174869954" watchObservedRunningTime="2025-11-25 10:34:14.816241011 +0000 UTC m=+152.182836690"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.891493 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-s8rd8" podStartSLOduration=12.891469955 podStartE2EDuration="12.891469955s" podCreationTimestamp="2025-11-25 10:34:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:14.862463789 +0000 UTC m=+152.229059478" watchObservedRunningTime="2025-11-25 10:34:14.891469955 +0000 UTC m=+152.258065654"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.891918 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.893501 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.393484257 +0000 UTC m=+152.760080036 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.931264 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-l6bw4" podStartSLOduration=131.931242894 podStartE2EDuration="2m11.931242894s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:14.928325904 +0000 UTC m=+152.294921613" watchObservedRunningTime="2025-11-25 10:34:14.931242894 +0000 UTC m=+152.297838583"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.946420 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"]
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.954233 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-5mp5k"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.976102 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-zh82l"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.976610 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-zh82l"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.978969 4702 patch_prober.go:28] interesting pod/console-f9d7485db-zh82l container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body=
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.979037 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zh82l" podUID="9923db54-633b-4725-87f8-384fa9feac18" containerName="console" probeResult="failure" output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused"
Nov 25 10:34:14 crc kubenswrapper[4702]: I1125 10:34:14.992697 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:14 crc kubenswrapper[4702]: E1125 10:34:14.993270 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.493254449 +0000 UTC m=+152.859850138 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.094818 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.095464 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.595449646 +0000 UTC m=+152.962045335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.097146 4702 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-n4r8n container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused" start-of-body=
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.097203 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.26:8443/healthz\": dial tcp 10.217.0.26:8443: connect: connection refused"
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.196994 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.696976902 +0000 UTC m=+153.063572591 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.197041 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.197278 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.197571 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.69756012 +0000 UTC m=+153.064155809 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.211729 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-t7mgq"
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.215711 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 25 10:34:15 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld
Nov 25 10:34:15 crc kubenswrapper[4702]: [+]process-running ok
Nov 25 10:34:15 crc kubenswrapper[4702]: healthz check failed
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.215767 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.221268 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-995q8"
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.298413 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.298536 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.798488478 +0000 UTC m=+153.165084167 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.298721 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.299054 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.799043415 +0000 UTC m=+153.165639114 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.400416 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.400602 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.900577482 +0000 UTC m=+153.267173171 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.400860 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.401340 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:15.901319295 +0000 UTC m=+153.267914984 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.463197 4702 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-t5vl4 container/packageserver namespace/openshift-operator-lifecycle-manager: Liveness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body=
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.463226 4702 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-t5vl4 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body=
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.463255 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" podUID="6c924c09-c172-4f11-91a5-86cb5949e5cd" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused"
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.463281 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" podUID="6c924c09-c172-4f11-91a5-86cb5949e5cd" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused"
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.489145 4702 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d5g4l container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body=
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.489215 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused"
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.489222 4702 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-d5g4l container/marketplace-operator namespace/openshift-marketplace: Liveness probe status=failure output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused" start-of-body=
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.489275 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.23:8080/healthz\": dial tcp 10.217.0.23:8080: connect: connection refused"
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.505229 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.505836 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.005806163 +0000 UTC m=+153.372401852 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.541567 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-74xrt" event={"ID":"26c8117b-b4b8-4563-980c-150a35aaf727","Type":"ContainerStarted","Data":"5bb883db0cd1f2e854cbc37cbbbadcda6d7a586d7fba01592938f8aa45ab9e74"}
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.542512 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b58be22e-99bb-493c-b5d2-c6917b43c9ec","Type":"ContainerStarted","Data":"0433ae0b9f50bf76dd5dd285a7960dd642483fb680b0c5627e3bc84cca2d48ed"}
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.607200 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.607586 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.107569486 +0000 UTC m=+153.474165175 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.708490 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.708603 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.208578326 +0000 UTC m=+153.575174015 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.709415 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.709694 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.20968083 +0000 UTC m=+153.576276519 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.811308 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.311026081 +0000 UTC m=+153.677621770 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.811371 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.811683 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.812058 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.312047143 +0000 UTC m=+153.678642832 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:15 crc kubenswrapper[4702]: I1125 10:34:15.913290 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:15 crc kubenswrapper[4702]: E1125 10:34:15.913489 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.413460955 +0000 UTC m=+153.780056644 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.015019 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.015372 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.515356663 +0000 UTC m=+153.881952352 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.116348 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.116564 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.616533709 +0000 UTC m=+153.983129408 (durationBeforeRetry 500ms).
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.116690 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.117074 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.617062255 +0000 UTC m=+153.983657944 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.218027 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.218303 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.718273411 +0000 UTC m=+154.084869100 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.218605 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.218937 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.718926902 +0000 UTC m=+154.085522591 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.221384 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:16 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:16 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:16 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.221454 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.319199 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.319568 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.8195464 +0000 UTC m=+154.186142089 (durationBeforeRetry 500ms). 
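
The startup-probe failures for router-default above show the kubelet's HTTP prober receiving a 500 whose body is a multi-check healthz report ([-]backend-http and [-]has-synced failing, [+]process-running passing); the pod stays unready until every check passes. Below is a minimal sketch of the request such a probe makes; the target URL is a placeholder assumption, and the real host, port, and path should be read from the container's startupProbe in the pod spec.

    // Sketch of an HTTP startup-probe request. The URL is an assumption for
    // illustration only; take the actual probe target from the pod spec.
    package main

    import (
    	"fmt"
    	"io"
    	"net/http"
    )

    func main() {
    	resp, err := http.Get("http://127.0.0.1:1936/healthz") // placeholder target
    	if err != nil {
    		fmt.Println("probe error:", err)
    		return
    	}
    	defer resp.Body.Close()
    	body, _ := io.ReadAll(resp.Body)
    	// During startup this returns 500 with per-check lines such as
    	// "[-]has-synced failed: reason withheld", matching the log above.
    	fmt.Printf("status=%d\n%s", resp.StatusCode, body)
    }
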
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.421306 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.421646 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:16.921630193 +0000 UTC m=+154.288225882 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.522714 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.522931 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.022887021 +0000 UTC m=+154.389482710 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.523039 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.523336 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.023322075 +0000 UTC m=+154.389917764 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.548848 4702 generic.go:334] "Generic (PLEG): container finished" podID="b58be22e-99bb-493c-b5d2-c6917b43c9ec" containerID="b35767687e19ad3de05d85aaee350e39d570c81db738efab1d1618b8e1022957" exitCode=0 Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.548892 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b58be22e-99bb-493c-b5d2-c6917b43c9ec","Type":"ContainerDied","Data":"b35767687e19ad3de05d85aaee350e39d570c81db738efab1d1618b8e1022957"} Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.624011 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.624206 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.12417986 +0000 UTC m=+154.490775549 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.624285 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.624591 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.124580293 +0000 UTC m=+154.491175982 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.725125 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.725231 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.225211521 +0000 UTC m=+154.591807210 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.725326 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.725672 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.225661705 +0000 UTC m=+154.592257394 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.826987 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.827192 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.32716042 +0000 UTC m=+154.693756119 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.827298 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.827686 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.327675376 +0000 UTC m=+154.694271075 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.927885 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.928042 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.428020936 +0000 UTC m=+154.794616625 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:16 crc kubenswrapper[4702]: I1125 10:34:16.928097 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:16 crc kubenswrapper[4702]: E1125 10:34:16.928455 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.428443119 +0000 UTC m=+154.795038818 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.029129 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.029307 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.529279414 +0000 UTC m=+154.895875103 (durationBeforeRetry 500ms). 
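
Each failed mount or unmount above is immediately followed by a nestedpendingoperations line scheduling the next attempt ("No retries permitted until <now+500ms> ... (durationBeforeRetry 500ms)"), which is why the same pair of errors recurs on a roughly 500ms cadence for as long as the driver stays unregistered. The following is a simplified model of that per-volume retry gate; it is a sketch of the observed behavior, not the kubelet's actual implementation.

    // Simplified model of the per-operation retry gate seen in the
    // nestedpendingoperations messages above.
    package main

    import (
    	"fmt"
    	"time"
    )

    // opGate: after a failure, the operation may not run again before notBefore.
    type opGate struct {
    	notBefore time.Time
    }

    func (g *opGate) tryRun(op func() error, backoff time.Duration) {
    	if time.Now().Before(g.notBefore) {
    		fmt.Println("no retries permitted until", g.notBefore.Format(time.RFC3339Nano))
    		return
    	}
    	if err := op(); err != nil {
    		g.notBefore = time.Now().Add(backoff)
    		fmt.Println("failed:", err, "- next retry after", g.notBefore.Format(time.RFC3339Nano))
    	}
    }

    func main() {
    	g := &opGate{}
    	mount := func() error { return fmt.Errorf("driver kubevirt.io.hostpath-provisioner not registered") }
    	for i := 0; i < 4; i++ {
    		g.tryRun(mount, 500*time.Millisecond) // durationBeforeRetry 500ms, as in the log
    		time.Sleep(200 * time.Millisecond)
    	}
    }
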
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.029445 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.029759 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.529744288 +0000 UTC m=+154.896339977 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.130071 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.130201 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.630172501 +0000 UTC m=+154.996768200 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.130343 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.130751 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.630731508 +0000 UTC m=+154.997327207 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.213932 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:17 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:17 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:17 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.215841 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.231533 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.231827 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.73180065 +0000 UTC m=+155.098396339 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.231960 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.232377 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.732361467 +0000 UTC m=+155.098957156 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.284382 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-vv8tj"] Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.285587 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.291892 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.299347 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vv8tj"] Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.332661 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.332834 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-catalog-content\") pod \"certified-operators-vv8tj\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.332888 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. 
No retries permitted until 2025-11-25 10:34:17.832848942 +0000 UTC m=+155.199444631 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.332945 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rthpp\" (UniqueName: \"kubernetes.io/projected/58eeaa3d-0858-43f1-a047-52775d340bc0-kube-api-access-rthpp\") pod \"certified-operators-vv8tj\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.333120 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-utilities\") pod \"certified-operators-vv8tj\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.333179 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.333532 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.833524953 +0000 UTC m=+155.200120632 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.434215 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.434332 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-utilities\") pod \"certified-operators-vv8tj\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.434435 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:17.934400309 +0000 UTC m=+155.300996008 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.434655 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-catalog-content\") pod \"certified-operators-vv8tj\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.434702 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rthpp\" (UniqueName: \"kubernetes.io/projected/58eeaa3d-0858-43f1-a047-52775d340bc0-kube-api-access-rthpp\") pod \"certified-operators-vv8tj\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.434789 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-utilities\") pod \"certified-operators-vv8tj\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.435051 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-catalog-content\") pod \"certified-operators-vv8tj\" 
(UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.458698 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rthpp\" (UniqueName: \"kubernetes.io/projected/58eeaa3d-0858-43f1-a047-52775d340bc0-kube-api-access-rthpp\") pod \"certified-operators-vv8tj\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.464406 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-dt97x"] Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.465379 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.468272 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.486180 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dt97x"] Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.488447 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-cf8hl" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.538495 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j7zgz\" (UniqueName: \"kubernetes.io/projected/5df05e89-c694-4234-b4fe-669de4c1dec5-kube-api-access-j7zgz\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.538582 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.538626 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-utilities\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.538700 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-catalog-content\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.539791 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.039773404 +0000 UTC m=+155.406369093 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.566456 4702 generic.go:334] "Generic (PLEG): container finished" podID="32c118a6-a92d-47fb-8169-bccbb5e51072" containerID="a4b65bfbc71f8464a9d2f7a1942602a7efcba0937961f5aff9d9d41d3faa3436" exitCode=0 Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.566709 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" event={"ID":"32c118a6-a92d-47fb-8169-bccbb5e51072","Type":"ContainerDied","Data":"a4b65bfbc71f8464a9d2f7a1942602a7efcba0937961f5aff9d9d41d3faa3436"} Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.602476 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.639705 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.639828 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j7zgz\" (UniqueName: \"kubernetes.io/projected/5df05e89-c694-4234-b4fe-669de4c1dec5-kube-api-access-j7zgz\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.639853 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.139831215 +0000 UTC m=+155.506426914 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.639935 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.639974 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-utilities\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.640028 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-catalog-content\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.640399 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.140386612 +0000 UTC m=+155.506982301 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.640514 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-catalog-content\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.640806 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-utilities\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.667984 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j7zgz\" (UniqueName: \"kubernetes.io/projected/5df05e89-c694-4234-b4fe-669de4c1dec5-kube-api-access-j7zgz\") pod \"community-operators-dt97x\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.676335 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-rt925"] Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.677388 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rt925" Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.696108 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rt925"] Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.740696 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.740947 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-utilities\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925" Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.741026 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.24099616 +0000 UTC m=+155.607591879 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.741069 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-catalog-content\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.741150 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lm98d\" (UniqueName: \"kubernetes.io/projected/eb941106-0eca-47c9-82d6-ac91a36f1366-kube-api-access-lm98d\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.741186 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.741485 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.241476124 +0000 UTC m=+155.608071813 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.805814 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dt97x"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.841882 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.842153 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-utilities\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.842260 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-catalog-content\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.842313 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lm98d\" (UniqueName: \"kubernetes.io/projected/eb941106-0eca-47c9-82d6-ac91a36f1366-kube-api-access-lm98d\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.842790 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.342775054 +0000 UTC m=+155.709370743 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.843200 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-utilities\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.843511 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-catalog-content\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.867284 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-2qpnn"]
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.870379 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.880949 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2qpnn"]
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.885217 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lm98d\" (UniqueName: \"kubernetes.io/projected/eb941106-0eca-47c9-82d6-ac91a36f1366-kube-api-access-lm98d\") pod \"certified-operators-rt925\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.934619 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.948453 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.948693 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b58be22e-99bb-493c-b5d2-c6917b43c9ec" containerName="pruner"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.948707 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="b58be22e-99bb-493c-b5d2-c6917b43c9ec" containerName="pruner"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.948837 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="b58be22e-99bb-493c-b5d2-c6917b43c9ec" containerName="pruner"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.949245 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.950360 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:17 crc kubenswrapper[4702]: E1125 10:34:17.950669 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.450655696 +0000 UTC m=+155.817251395 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.951404 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.951750 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt"
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.971722 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 25 10:34:17 crc kubenswrapper[4702]: I1125 10:34:17.988956 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-vv8tj"]
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.012027 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rt925"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.051354 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kube-api-access\") pod \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\" (UID: \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\") "
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.051519 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kubelet-dir\") pod \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\" (UID: \"b58be22e-99bb-493c-b5d2-c6917b43c9ec\") "
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.051632 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.051824 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "b58be22e-99bb-493c-b5d2-c6917b43c9ec" (UID: "b58be22e-99bb-493c-b5d2-c6917b43c9ec"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Nov 25 10:34:18 crc kubenswrapper[4702]: E1125 10:34:18.052321 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.552297915 +0000 UTC m=+155.918893644 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.052488 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ced20a2-478c-47da-a590-fea4bcbaebd9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ced20a2-478c-47da-a590-fea4bcbaebd9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.052587 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ced20a2-478c-47da-a590-fea4bcbaebd9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ced20a2-478c-47da-a590-fea4bcbaebd9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.052686 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ndmcp\" (UniqueName: \"kubernetes.io/projected/2c770958-ad07-45e3-8793-16a1e66a0aca-kube-api-access-ndmcp\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.052747 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-catalog-content\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.052792 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-utilities\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.052944 4702 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kubelet-dir\") on node \"crc\" DevicePath \"\""
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.064216 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "b58be22e-99bb-493c-b5d2-c6917b43c9ec" (UID: "b58be22e-99bb-493c-b5d2-c6917b43c9ec"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.115757 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-dt97x"]
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.154861 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.154994 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ced20a2-478c-47da-a590-fea4bcbaebd9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ced20a2-478c-47da-a590-fea4bcbaebd9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.155039 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ced20a2-478c-47da-a590-fea4bcbaebd9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ced20a2-478c-47da-a590-fea4bcbaebd9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.155066 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ndmcp\" (UniqueName: \"kubernetes.io/projected/2c770958-ad07-45e3-8793-16a1e66a0aca-kube-api-access-ndmcp\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.155090 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-catalog-content\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.155128 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-utilities\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.155183 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/b58be22e-99bb-493c-b5d2-c6917b43c9ec-kube-api-access\") on node \"crc\" DevicePath \"\""
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.155658 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ced20a2-478c-47da-a590-fea4bcbaebd9-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"1ced20a2-478c-47da-a590-fea4bcbaebd9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.155696 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-utilities\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: E1125 10:34:18.156012 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.655992178 +0000 UTC m=+156.022587947 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.156706 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-catalog-content\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.179449 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ced20a2-478c-47da-a590-fea4bcbaebd9-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"1ced20a2-478c-47da-a590-fea4bcbaebd9\") " pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.181139 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ndmcp\" (UniqueName: \"kubernetes.io/projected/2c770958-ad07-45e3-8793-16a1e66a0aca-kube-api-access-ndmcp\") pod \"community-operators-2qpnn\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.217692 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 25 10:34:18 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld
Nov 25 10:34:18 crc kubenswrapper[4702]: [+]process-running ok
Nov 25 10:34:18 crc kubenswrapper[4702]: healthz check failed
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.218057 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2qpnn"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.218175 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.255449 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:18 crc kubenswrapper[4702]: E1125 10:34:18.255724 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.755706929 +0000 UTC m=+156.122302618 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.284730 4702 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.285032 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc"
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.287814 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-rt925"]
Nov 25 10:34:18 crc kubenswrapper[4702]: W1125 10:34:18.321868 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podeb941106_0eca_47c9_82d6_ac91a36f1366.slice/crio-0c8603b075ef1b4749e5703710a04d4d56986e412df7e2647a915269c881df86 WatchSource:0}: Error finding container 0c8603b075ef1b4749e5703710a04d4d56986e412df7e2647a915269c881df86: Status 404 returned error can't find the container with id 0c8603b075ef1b4749e5703710a04d4d56986e412df7e2647a915269c881df86
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.356992 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:18 crc kubenswrapper[4702]: E1125 10:34:18.357468 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.857452322 +0000 UTC m=+156.224048011 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.457665 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:18 crc kubenswrapper[4702]: E1125 10:34:18.457836 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.957805712 +0000 UTC m=+156.324401401 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.458220 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:18 crc kubenswrapper[4702]: E1125 10:34:18.458602 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2025-11-25 10:34:18.958580556 +0000 UTC m=+156.325176245 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-zbgbq" (UID: "74207563-11c3-4723-8375-7a61d6f27733") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.559476 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:18 crc kubenswrapper[4702]: E1125 10:34:18.559729 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2025-11-25 10:34:19.05971464 +0000 UTC m=+156.426310329 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.582115 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-2qpnn"]
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.587447 4702 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2025-11-25T10:34:18.284757246Z","Handler":null,"Name":""}
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.596296 4702 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.596335 4702 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.625119 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-74xrt" event={"ID":"26c8117b-b4b8-4563-980c-150a35aaf727","Type":"ContainerStarted","Data":"92520c9c9e13f8e5ebfdf19a6c081718652a9a3621e34c2a5dde8602df915f23"}
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.625517 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-74xrt" event={"ID":"26c8117b-b4b8-4563-980c-150a35aaf727","Type":"ContainerStarted","Data":"28017b708eece1f6562650f956f7036369c63d038388700ddda1d3154d46dc4d"}
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.645695 4702 generic.go:334] "Generic (PLEG): container finished" podID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerID="e74d018441003dd1022892460e4c59f102076042a2a917b7e287acda31cf7db9" exitCode=0
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.645792 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dt97x" event={"ID":"5df05e89-c694-4234-b4fe-669de4c1dec5","Type":"ContainerDied","Data":"e74d018441003dd1022892460e4c59f102076042a2a917b7e287acda31cf7db9"}
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.645818 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dt97x" event={"ID":"5df05e89-c694-4234-b4fe-669de4c1dec5","Type":"ContainerStarted","Data":"6fc4f2b3b601304da8eeaef97b3079cdac427b4f2fd91a9d810c823c853bfd24"}
Nov 25 10:34:18 crc kubenswrapper[4702]: I1125 10:34:18.650516 4702 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:18.655537 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"b58be22e-99bb-493c-b5d2-c6917b43c9ec","Type":"ContainerDied","Data":"0433ae0b9f50bf76dd5dd285a7960dd642483fb680b0c5627e3bc84cca2d48ed"}
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:18.655569 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0433ae0b9f50bf76dd5dd285a7960dd642483fb680b0c5627e3bc84cca2d48ed"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:18.655626 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.319336 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.322793 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rt925" event={"ID":"eb941106-0eca-47c9-82d6-ac91a36f1366","Type":"ContainerStarted","Data":"0c8603b075ef1b4749e5703710a04d4d56986e412df7e2647a915269c881df86"}
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.322847 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.329190 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 25 10:34:19 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]process-running ok
Nov 25 10:34:19 crc kubenswrapper[4702]: healthz check failed
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.329271 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.336130 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.346019 4702 patch_prober.go:28] interesting pod/apiserver-76f77b778f-fh9fl container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]log ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]etcd ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/start-apiserver-admission-initializer ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/generic-apiserver-start-informers ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/max-in-flight-filter ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/storage-object-count-tracker-hook ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/image.openshift.io-apiserver-caches ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld
Nov 25 10:34:19 crc kubenswrapper[4702]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/project.openshift.io-projectcache ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/openshift.io-startinformers ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/openshift.io-restmapperupdater ok
Nov 25 10:34:19 crc kubenswrapper[4702]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok
Nov 25 10:34:19 crc kubenswrapper[4702]: livez check failed
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.348743 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.361783 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" podUID="738f53e3-118f-44e8-9b32-baeff939bef4" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.366339 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.379277 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"]
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.386263 4702 generic.go:334] "Generic (PLEG): container finished" podID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerID="96c46a903ba35e41877319eb88f906065d3242a7ec9e3ce46d99cefed2d111a8" exitCode=0
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.387697 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vv8tj" event={"ID":"58eeaa3d-0858-43f1-a047-52775d340bc0","Type":"ContainerDied","Data":"96c46a903ba35e41877319eb88f906065d3242a7ec9e3ce46d99cefed2d111a8"}
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.387746 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vv8tj" event={"ID":"58eeaa3d-0858-43f1-a047-52775d340bc0","Type":"ContainerStarted","Data":"ef78860628f5a45b8c496b80870ce5ed5fe98d943e767ff7ca50f70670b739ba"}
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.436187 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.460877 4702 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice...
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.460933 4702 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.478120 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.478810 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-btsrc"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.478839 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-rzrq9"]
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.479926 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.485235 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.507678 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rzrq9"]
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.537735 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-utilities\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.537774 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-catalog-content\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.537831 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7qzb6\" (UniqueName: \"kubernetes.io/projected/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-kube-api-access-7qzb6\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.612051 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-zbgbq\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.650950 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-utilities\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.651006 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-catalog-content\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.651090 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7qzb6\" (UniqueName: \"kubernetes.io/projected/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-kube-api-access-7qzb6\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.652061 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-utilities\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.652287 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-catalog-content\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.688101 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7qzb6\" (UniqueName: \"kubernetes.io/projected/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-kube-api-access-7qzb6\") pod \"redhat-marketplace-rzrq9\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.838921 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.866258 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-4qhgc"]
Nov 25 10:34:19 crc kubenswrapper[4702]: E1125 10:34:19.866533 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="32c118a6-a92d-47fb-8169-bccbb5e51072" containerName="collect-profiles"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.866549 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="32c118a6-a92d-47fb-8169-bccbb5e51072" containerName="collect-profiles"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.866665 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="32c118a6-a92d-47fb-8169-bccbb5e51072" containerName="collect-profiles"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.867460 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.878721 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rzrq9"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.882295 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qhgc"]
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.897670 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.953510 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32c118a6-a92d-47fb-8169-bccbb5e51072-secret-volume\") pod \"32c118a6-a92d-47fb-8169-bccbb5e51072\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") "
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.953686 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32c118a6-a92d-47fb-8169-bccbb5e51072-config-volume\") pod \"32c118a6-a92d-47fb-8169-bccbb5e51072\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") "
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.953737 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntwj9\" (UniqueName: \"kubernetes.io/projected/32c118a6-a92d-47fb-8169-bccbb5e51072-kube-api-access-ntwj9\") pod \"32c118a6-a92d-47fb-8169-bccbb5e51072\" (UID: \"32c118a6-a92d-47fb-8169-bccbb5e51072\") "
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.953924 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qf7zl\" (UniqueName: \"kubernetes.io/projected/4150132d-8f23-48d9-9635-0701e221acbd-kube-api-access-qf7zl\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.954079 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-utilities\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.954335 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-catalog-content\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.955350 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/32c118a6-a92d-47fb-8169-bccbb5e51072-config-volume" (OuterVolumeSpecName: "config-volume") pod "32c118a6-a92d-47fb-8169-bccbb5e51072" (UID: "32c118a6-a92d-47fb-8169-bccbb5e51072"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.958593 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/32c118a6-a92d-47fb-8169-bccbb5e51072-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "32c118a6-a92d-47fb-8169-bccbb5e51072" (UID: "32c118a6-a92d-47fb-8169-bccbb5e51072"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:34:19 crc kubenswrapper[4702]: I1125 10:34:19.958629 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/32c118a6-a92d-47fb-8169-bccbb5e51072-kube-api-access-ntwj9" (OuterVolumeSpecName: "kube-api-access-ntwj9") pod "32c118a6-a92d-47fb-8169-bccbb5e51072" (UID: "32c118a6-a92d-47fb-8169-bccbb5e51072"). InnerVolumeSpecName "kube-api-access-ntwj9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.055503 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-utilities\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.055622 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-catalog-content\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.055661 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qf7zl\" (UniqueName: \"kubernetes.io/projected/4150132d-8f23-48d9-9635-0701e221acbd-kube-api-access-qf7zl\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.055713 4702 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/32c118a6-a92d-47fb-8169-bccbb5e51072-config-volume\") on node \"crc\" DevicePath \"\""
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.055725 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntwj9\" (UniqueName: \"kubernetes.io/projected/32c118a6-a92d-47fb-8169-bccbb5e51072-kube-api-access-ntwj9\") on node \"crc\" DevicePath \"\""
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.055733 4702 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/32c118a6-a92d-47fb-8169-bccbb5e51072-secret-volume\") on node \"crc\" DevicePath \"\""
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.059371 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-catalog-content\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.059376 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-utilities\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.075714 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qf7zl\" (UniqueName: \"kubernetes.io/projected/4150132d-8f23-48d9-9635-0701e221acbd-kube-api-access-qf7zl\") pod \"redhat-marketplace-4qhgc\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.103678 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zbgbq"]
Nov 25 10:34:20 crc kubenswrapper[4702]: W1125 10:34:20.111658 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod74207563_11c3_4723_8375_7a61d6f27733.slice/crio-01e54833f9ebe610ec0d52ff1c049c39ae5e81d4a63583b237e5433c88464db8 WatchSource:0}: Error finding container 01e54833f9ebe610ec0d52ff1c049c39ae5e81d4a63583b237e5433c88464db8: Status 404 returned error can't find the container with id 01e54833f9ebe610ec0d52ff1c049c39ae5e81d4a63583b237e5433c88464db8
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.145842 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-rzrq9"]
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.214600 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Nov 25 10:34:20 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld
Nov 25 10:34:20 crc kubenswrapper[4702]: [+]process-running ok
Nov 25 10:34:20 crc kubenswrapper[4702]: healthz check failed
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.214647 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.230980 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qhgc"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.403690 4702 generic.go:334] "Generic (PLEG): container finished" podID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerID="b1df9640c1e20fbca1e8196d190a639932226cc0c7fb0a6a722287217ef333c3" exitCode=0
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.404491 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qpnn" event={"ID":"2c770958-ad07-45e3-8793-16a1e66a0aca","Type":"ContainerDied","Data":"b1df9640c1e20fbca1e8196d190a639932226cc0c7fb0a6a722287217ef333c3"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.404570 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qpnn" event={"ID":"2c770958-ad07-45e3-8793-16a1e66a0aca","Type":"ContainerStarted","Data":"3e60a406f644f96b8c557dc5f6adb6ac37f91614ef434b47b444a96ace80d6ae"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.422286 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld" event={"ID":"32c118a6-a92d-47fb-8169-bccbb5e51072","Type":"ContainerDied","Data":"15a09b708aefb80ad1e2ea642e68d282d10df1d14a4242914f2ffb28014b36db"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.422320 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15a09b708aefb80ad1e2ea642e68d282d10df1d14a4242914f2ffb28014b36db"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.422388 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.469775 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qhgc"]
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.471119 4702 generic.go:334] "Generic (PLEG): container finished" podID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerID="73594cb5e83d99d91eb7cc2e179c7f976a8d8055ad8a6eb54536b1a125ecfad8" exitCode=0
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.471358 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rt925" event={"ID":"eb941106-0eca-47c9-82d6-ac91a36f1366","Type":"ContainerDied","Data":"73594cb5e83d99d91eb7cc2e179c7f976a8d8055ad8a6eb54536b1a125ecfad8"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.484783 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-q7q85"]
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.486480 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q7q85"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.487809 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ced20a2-478c-47da-a590-fea4bcbaebd9","Type":"ContainerStarted","Data":"e9e78b560a704e44a25c32986ca2028e2d6e9adafbf93e2baaf85fa823b72a05"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.487867 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ced20a2-478c-47da-a590-fea4bcbaebd9","Type":"ContainerStarted","Data":"3a575bc14d5458f4b92bfb6e75b5035a376bd966bb8a3d72f1587788e2a31146"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.489303 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q7q85"]
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.489679 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" event={"ID":"74207563-11c3-4723-8375-7a61d6f27733","Type":"ContainerStarted","Data":"6ad705c16ce2020397ada49c92c3f03d19d169c170b7176ed07c722903feec16"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.489715 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" event={"ID":"74207563-11c3-4723-8375-7a61d6f27733","Type":"ContainerStarted","Data":"01e54833f9ebe610ec0d52ff1c049c39ae5e81d4a63583b237e5433c88464db8"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.490105 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.491241 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.503502 4702 generic.go:334] "Generic (PLEG): container finished" podID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerID="bbdcab74af86b8c5617210050c26414e425b311021b4d106d8365179079a28cb" exitCode=0
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.503592 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rzrq9" event={"ID":"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f","Type":"ContainerDied","Data":"bbdcab74af86b8c5617210050c26414e425b311021b4d106d8365179079a28cb"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.503640 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rzrq9" event={"ID":"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f","Type":"ContainerStarted","Data":"058dbe96be77101fb7f5a4c0bcbb1b4f9a9c31567c75cc6de9beeb3aafc1113b"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.516408 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.516383863 podStartE2EDuration="3.516383863s" podCreationTimestamp="2025-11-25 10:34:17 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:20.513071851 +0000 UTC m=+157.879667550" watchObservedRunningTime="2025-11-25 10:34:20.516383863 +0000 UTC m=+157.882979552"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.546574 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-74xrt" event={"ID":"26c8117b-b4b8-4563-980c-150a35aaf727","Type":"ContainerStarted","Data":"974df29b98c380ac04419703c7c785ea05242ebe3b320fcf2cd318a9dfd38d9f"}
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.568039 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" podStartSLOduration=137.568022138 podStartE2EDuration="2m17.568022138s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:20.564972134 +0000 UTC m=+157.931567833" watchObservedRunningTime="2025-11-25 10:34:20.568022138 +0000 UTC m=+157.934617827"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.569309 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-s8rd8"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.611409 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-74xrt" podStartSLOduration=18.611393678 podStartE2EDuration="18.611393678s" podCreationTimestamp="2025-11-25 10:34:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:34:20.590976367 +0000 UTC m=+157.957572076" watchObservedRunningTime="2025-11-25 10:34:20.611393678 +0000 UTC m=+157.977989367"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.666198 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-catalog-content\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.666323 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmfhq\" (UniqueName: \"kubernetes.io/projected/a9ba63f3-8505-42ff-8804-47823d199524-kube-api-access-wmfhq\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.666674 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-utilities\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.768225 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-utilities\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85"
Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.768309 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-catalog-content\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85"
Nov 25
10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.768705 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-utilities\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.768722 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-catalog-content\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.768770 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmfhq\" (UniqueName: \"kubernetes.io/projected/a9ba63f3-8505-42ff-8804-47823d199524-kube-api-access-wmfhq\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.794087 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmfhq\" (UniqueName: \"kubernetes.io/projected/a9ba63f3-8505-42ff-8804-47823d199524-kube-api-access-wmfhq\") pod \"redhat-operators-q7q85\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.845698 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.865531 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rlsjf"] Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.866632 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.875174 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rlsjf"] Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.973300 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-utilities\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.973699 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mr5kd\" (UniqueName: \"kubernetes.io/projected/78d54279-d896-4edd-b53b-c0458d7fddf6-kube-api-access-mr5kd\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:20 crc kubenswrapper[4702]: I1125 10:34:20.973770 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-catalog-content\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.075214 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-utilities\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.075783 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-utilities\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.075994 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mr5kd\" (UniqueName: \"kubernetes.io/projected/78d54279-d896-4edd-b53b-c0458d7fddf6-kube-api-access-mr5kd\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.076177 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-catalog-content\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.076618 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-catalog-content\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.110317 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-mr5kd\" (UniqueName: \"kubernetes.io/projected/78d54279-d896-4edd-b53b-c0458d7fddf6-kube-api-access-mr5kd\") pod \"redhat-operators-rlsjf\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.214531 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:21 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:21 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:21 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.214620 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.267545 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.294664 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-q7q85"] Nov 25 10:34:21 crc kubenswrapper[4702]: W1125 10:34:21.306757 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda9ba63f3_8505_42ff_8804_47823d199524.slice/crio-a78503789b5dc7b17b7348de7d161361b506c4910b55275fbacf48dcaa37855c WatchSource:0}: Error finding container a78503789b5dc7b17b7348de7d161361b506c4910b55275fbacf48dcaa37855c: Status 404 returned error can't find the container with id a78503789b5dc7b17b7348de7d161361b506c4910b55275fbacf48dcaa37855c Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.496631 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rlsjf"] Nov 25 10:34:21 crc kubenswrapper[4702]: W1125 10:34:21.505164 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod78d54279_d896_4edd_b53b_c0458d7fddf6.slice/crio-726481da5ea28536dfeada63e4657434e1ad37e6aea3a3a7e125a14d9d22e18c WatchSource:0}: Error finding container 726481da5ea28536dfeada63e4657434e1ad37e6aea3a3a7e125a14d9d22e18c: Status 404 returned error can't find the container with id 726481da5ea28536dfeada63e4657434e1ad37e6aea3a3a7e125a14d9d22e18c Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.562275 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7q85" event={"ID":"a9ba63f3-8505-42ff-8804-47823d199524","Type":"ContainerStarted","Data":"a78503789b5dc7b17b7348de7d161361b506c4910b55275fbacf48dcaa37855c"} Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.564935 4702 generic.go:334] "Generic (PLEG): container finished" podID="4150132d-8f23-48d9-9635-0701e221acbd" containerID="b25e7448d700ee3b7d6853d3c3eb5541104333a5bd61e01bf3df6e3c403af1f4" exitCode=0 Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.565173 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qhgc" 
event={"ID":"4150132d-8f23-48d9-9635-0701e221acbd","Type":"ContainerDied","Data":"b25e7448d700ee3b7d6853d3c3eb5541104333a5bd61e01bf3df6e3c403af1f4"} Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.565194 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qhgc" event={"ID":"4150132d-8f23-48d9-9635-0701e221acbd","Type":"ContainerStarted","Data":"db1d7451cc5ce18c586064fed7c009dcd6fa4273e8478671122ab4508b91b296"} Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.566434 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlsjf" event={"ID":"78d54279-d896-4edd-b53b-c0458d7fddf6","Type":"ContainerStarted","Data":"726481da5ea28536dfeada63e4657434e1ad37e6aea3a3a7e125a14d9d22e18c"} Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.572732 4702 generic.go:334] "Generic (PLEG): container finished" podID="1ced20a2-478c-47da-a590-fea4bcbaebd9" containerID="e9e78b560a704e44a25c32986ca2028e2d6e9adafbf93e2baaf85fa823b72a05" exitCode=0 Nov 25 10:34:21 crc kubenswrapper[4702]: I1125 10:34:21.572839 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ced20a2-478c-47da-a590-fea4bcbaebd9","Type":"ContainerDied","Data":"e9e78b560a704e44a25c32986ca2028e2d6e9adafbf93e2baaf85fa823b72a05"} Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.214599 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:22 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:22 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:22 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.214980 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.581013 4702 generic.go:334] "Generic (PLEG): container finished" podID="a9ba63f3-8505-42ff-8804-47823d199524" containerID="f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87" exitCode=0 Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.581113 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7q85" event={"ID":"a9ba63f3-8505-42ff-8804-47823d199524","Type":"ContainerDied","Data":"f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87"} Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.584865 4702 generic.go:334] "Generic (PLEG): container finished" podID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerID="7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa" exitCode=0 Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.584994 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlsjf" event={"ID":"78d54279-d896-4edd-b53b-c0458d7fddf6","Type":"ContainerDied","Data":"7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa"} Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.855765 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.925139 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ced20a2-478c-47da-a590-fea4bcbaebd9-kube-api-access\") pod \"1ced20a2-478c-47da-a590-fea4bcbaebd9\" (UID: \"1ced20a2-478c-47da-a590-fea4bcbaebd9\") " Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.925232 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ced20a2-478c-47da-a590-fea4bcbaebd9-kubelet-dir\") pod \"1ced20a2-478c-47da-a590-fea4bcbaebd9\" (UID: \"1ced20a2-478c-47da-a590-fea4bcbaebd9\") " Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.925329 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/1ced20a2-478c-47da-a590-fea4bcbaebd9-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "1ced20a2-478c-47da-a590-fea4bcbaebd9" (UID: "1ced20a2-478c-47da-a590-fea4bcbaebd9"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.925615 4702 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/1ced20a2-478c-47da-a590-fea4bcbaebd9-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:34:22 crc kubenswrapper[4702]: I1125 10:34:22.931884 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1ced20a2-478c-47da-a590-fea4bcbaebd9-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1ced20a2-478c-47da-a590-fea4bcbaebd9" (UID: "1ced20a2-478c-47da-a590-fea4bcbaebd9"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:34:23 crc kubenswrapper[4702]: I1125 10:34:23.026365 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1ced20a2-478c-47da-a590-fea4bcbaebd9-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 10:34:23 crc kubenswrapper[4702]: I1125 10:34:23.214173 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:23 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:23 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:23 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:23 crc kubenswrapper[4702]: I1125 10:34:23.214282 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:23 crc kubenswrapper[4702]: I1125 10:34:23.602875 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"1ced20a2-478c-47da-a590-fea4bcbaebd9","Type":"ContainerDied","Data":"3a575bc14d5458f4b92bfb6e75b5035a376bd966bb8a3d72f1587788e2a31146"} Nov 25 10:34:23 crc kubenswrapper[4702]: I1125 10:34:23.602930 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3a575bc14d5458f4b92bfb6e75b5035a376bd966bb8a3d72f1587788e2a31146" Nov 25 10:34:23 crc kubenswrapper[4702]: I1125 10:34:23.602992 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.139934 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.149137 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-apiserver/apiserver-76f77b778f-fh9fl" Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.225025 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:24 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:24 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:24 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.225191 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.703680 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.704032 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.704535 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.704563 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.976679 4702 patch_prober.go:28] interesting pod/console-f9d7485db-zh82l container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Nov 25 10:34:24 crc kubenswrapper[4702]: I1125 10:34:24.976739 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zh82l" podUID="9923db54-633b-4725-87f8-384fa9feac18" containerName="console" probeResult="failure" output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" Nov 25 10:34:25 crc kubenswrapper[4702]: I1125 10:34:25.101484 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:34:25 crc kubenswrapper[4702]: I1125 10:34:25.213152 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:25 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:25 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:25 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:25 crc kubenswrapper[4702]: I1125 10:34:25.213219 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:25 crc kubenswrapper[4702]: I1125 10:34:25.473686 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-t5vl4" Nov 25 10:34:25 crc kubenswrapper[4702]: I1125 10:34:25.493209 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:34:25 crc kubenswrapper[4702]: I1125 10:34:25.776302 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:34:25 crc kubenswrapper[4702]: I1125 10:34:25.782855 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c8f0f344-2768-49e0-a344-81f5b457b671-metrics-certs\") pod \"network-metrics-daemon-fnlmg\" (UID: \"c8f0f344-2768-49e0-a344-81f5b457b671\") " pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:34:25 crc kubenswrapper[4702]: I1125 10:34:25.941989 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-fnlmg" Nov 25 10:34:26 crc kubenswrapper[4702]: I1125 10:34:26.214110 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:26 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:26 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:26 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:26 crc kubenswrapper[4702]: I1125 10:34:26.214162 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:27 crc kubenswrapper[4702]: I1125 10:34:27.213393 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:27 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:27 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:27 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:27 crc kubenswrapper[4702]: I1125 10:34:27.213453 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:28 crc kubenswrapper[4702]: I1125 10:34:28.214494 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:28 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:28 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:28 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:28 crc kubenswrapper[4702]: I1125 10:34:28.214572 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:29 crc kubenswrapper[4702]: I1125 10:34:29.214688 4702 patch_prober.go:28] interesting pod/router-default-5444994796-t7mgq container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Nov 25 10:34:29 crc kubenswrapper[4702]: [-]has-synced failed: reason withheld Nov 25 10:34:29 crc kubenswrapper[4702]: [+]process-running ok Nov 25 10:34:29 crc kubenswrapper[4702]: healthz check failed Nov 25 10:34:29 crc kubenswrapper[4702]: I1125 10:34:29.214764 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-t7mgq" podUID="d5393776-7502-4849-b157-6899da0bf181" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Nov 25 10:34:30 crc kubenswrapper[4702]: I1125 10:34:30.228823 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:30 crc kubenswrapper[4702]: I1125 10:34:30.233166 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-t7mgq" Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.697126 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.697144 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.697448 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.697485 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.697482 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.697834 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.697878 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.698077 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"91f13a19d21404f5e507e08116dc83e2ce67255492c9c90958bee7a718fae12a"} pod="openshift-console/downloads-7954f5f757-2r4cg" containerMessage="Container download-server failed liveness probe, will be restarted" Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.698171 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" containerID="cri-o://91f13a19d21404f5e507e08116dc83e2ce67255492c9c90958bee7a718fae12a" gracePeriod=2 Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.977137 4702 patch_prober.go:28] interesting pod/console-f9d7485db-zh82l container/console namespace/openshift-console: Startup probe status=failure output="Get 
\"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" start-of-body= Nov 25 10:34:34 crc kubenswrapper[4702]: I1125 10:34:34.977195 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-zh82l" podUID="9923db54-633b-4725-87f8-384fa9feac18" containerName="console" probeResult="failure" output="Get \"https://10.217.0.18:8443/health\": dial tcp 10.217.0.18:8443: connect: connection refused" Nov 25 10:34:36 crc kubenswrapper[4702]: I1125 10:34:36.705007 4702 generic.go:334] "Generic (PLEG): container finished" podID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerID="91f13a19d21404f5e507e08116dc83e2ce67255492c9c90958bee7a718fae12a" exitCode=0 Nov 25 10:34:36 crc kubenswrapper[4702]: I1125 10:34:36.705078 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2r4cg" event={"ID":"a8621fa2-6cb1-4e0e-b1ed-3f254430262b","Type":"ContainerDied","Data":"91f13a19d21404f5e507e08116dc83e2ce67255492c9c90958bee7a718fae12a"} Nov 25 10:34:39 crc kubenswrapper[4702]: I1125 10:34:39.903460 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:34:43 crc kubenswrapper[4702]: I1125 10:34:43.590958 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:34:43 crc kubenswrapper[4702]: I1125 10:34:43.591455 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:34:44 crc kubenswrapper[4702]: I1125 10:34:44.697831 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:44 crc kubenswrapper[4702]: I1125 10:34:44.698125 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:44 crc kubenswrapper[4702]: I1125 10:34:44.857775 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-bqhvf" Nov 25 10:34:44 crc kubenswrapper[4702]: I1125 10:34:44.980588 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:44 crc kubenswrapper[4702]: I1125 10:34:44.984411 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/console-f9d7485db-zh82l" Nov 25 10:34:52 crc kubenswrapper[4702]: I1125 10:34:52.523547 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Nov 25 10:34:54 crc kubenswrapper[4702]: I1125 10:34:54.698528 4702 
patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:34:54 crc kubenswrapper[4702]: I1125 10:34:54.699081 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:34:55 crc kubenswrapper[4702]: E1125 10:34:55.275359 4702 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Nov 25 10:34:55 crc kubenswrapper[4702]: E1125 10:34:55.275617 4702 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-rthpp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-vv8tj_openshift-marketplace(58eeaa3d-0858-43f1-a047-52775d340bc0): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 10:34:55 crc kubenswrapper[4702]: E1125 10:34:55.276787 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-vv8tj" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" Nov 25 10:34:55 crc kubenswrapper[4702]: E1125 10:34:55.308522 4702 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" 
Nov 25 10:34:55 crc kubenswrapper[4702]: E1125 10:34:55.308671 4702 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-lm98d,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-rt925_openshift-marketplace(eb941106-0eca-47c9-82d6-ac91a36f1366): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 10:34:55 crc kubenswrapper[4702]: E1125 10:34:55.309818 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-rt925" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" Nov 25 10:34:57 crc kubenswrapper[4702]: E1125 10:34:57.039799 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-rt925" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" Nov 25 10:34:57 crc kubenswrapper[4702]: E1125 10:34:57.040441 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-vv8tj" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" Nov 25 10:35:03 crc kubenswrapper[4702]: E1125 10:35:03.341355 4702 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 25 10:35:03 crc kubenswrapper[4702]: E1125 10:35:03.341872 4702 kuberuntime_manager.go:1274] "Unhandled Error" err="init container 
&Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-7qzb6,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-rzrq9_openshift-marketplace(cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 10:35:03 crc kubenswrapper[4702]: E1125 10:35:03.343158 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-rzrq9" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" Nov 25 10:35:04 crc kubenswrapper[4702]: I1125 10:35:04.698040 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:35:04 crc kubenswrapper[4702]: I1125 10:35:04.698098 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:35:05 crc kubenswrapper[4702]: E1125 10:35:05.986146 4702 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 25 10:35:05 crc kubenswrapper[4702]: E1125 10:35:05.986748 4702 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-j7zgz,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-dt97x_openshift-marketplace(5df05e89-c694-4234-b4fe-669de4c1dec5): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 10:35:05 crc kubenswrapper[4702]: E1125 10:35:05.988393 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-dt97x" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" Nov 25 10:35:06 crc kubenswrapper[4702]: E1125 10:35:06.458595 4702 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Nov 25 10:35:06 crc kubenswrapper[4702]: E1125 10:35:06.458795 4702 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qf7zl,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-4qhgc_openshift-marketplace(4150132d-8f23-48d9-9635-0701e221acbd): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 10:35:06 crc kubenswrapper[4702]: E1125 10:35:06.459965 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-4qhgc" podUID="4150132d-8f23-48d9-9635-0701e221acbd" Nov 25 10:35:07 crc kubenswrapper[4702]: E1125 10:35:07.009241 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-4qhgc" podUID="4150132d-8f23-48d9-9635-0701e221acbd" Nov 25 10:35:07 crc kubenswrapper[4702]: E1125 10:35:07.009704 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-dt97x" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" Nov 25 10:35:07 crc kubenswrapper[4702]: E1125 10:35:07.009978 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-rzrq9" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" Nov 25 10:35:07 crc kubenswrapper[4702]: E1125 10:35:07.039734 4702 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 25 10:35:07 crc kubenswrapper[4702]: E1125 10:35:07.039884 4702 kuberuntime_manager.go:1274] "Unhandled 
Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-wmfhq,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-q7q85_openshift-marketplace(a9ba63f3-8505-42ff-8804-47823d199524): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 10:35:07 crc kubenswrapper[4702]: E1125 10:35:07.041857 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-q7q85" podUID="a9ba63f3-8505-42ff-8804-47823d199524" Nov 25 10:35:07 crc kubenswrapper[4702]: I1125 10:35:07.412867 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-fnlmg"] Nov 25 10:35:07 crc kubenswrapper[4702]: W1125 10:35:07.415218 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc8f0f344_2768_49e0_a344_81f5b457b671.slice/crio-fda092695231292f61f22d31166b6bbb2a9e5621b342cb2a0aad1f6f487494a5 WatchSource:0}: Error finding container fda092695231292f61f22d31166b6bbb2a9e5621b342cb2a0aad1f6f487494a5: Status 404 returned error can't find the container with id fda092695231292f61f22d31166b6bbb2a9e5621b342cb2a0aad1f6f487494a5 Nov 25 10:35:07 crc kubenswrapper[4702]: I1125 10:35:07.869043 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" event={"ID":"c8f0f344-2768-49e0-a344-81f5b457b671","Type":"ContainerStarted","Data":"fda092695231292f61f22d31166b6bbb2a9e5621b342cb2a0aad1f6f487494a5"} Nov 25 10:35:07 crc kubenswrapper[4702]: E1125 10:35:07.870489 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" 
pod="openshift-marketplace/redhat-operators-q7q85" podUID="a9ba63f3-8505-42ff-8804-47823d199524" Nov 25 10:35:08 crc kubenswrapper[4702]: E1125 10:35:08.720493 4702 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Nov 25 10:35:08 crc kubenswrapper[4702]: E1125 10:35:08.720654 4702 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-ndmcp,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-2qpnn_openshift-marketplace(2c770958-ad07-45e3-8793-16a1e66a0aca): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 10:35:08 crc kubenswrapper[4702]: E1125 10:35:08.721920 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-2qpnn" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" Nov 25 10:35:08 crc kubenswrapper[4702]: I1125 10:35:08.876438 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-2r4cg" event={"ID":"a8621fa2-6cb1-4e0e-b1ed-3f254430262b","Type":"ContainerStarted","Data":"cb1bb269653beb0f22f395fd5788a695184daf7a7085e33cd01091e4f7374431"} Nov 25 10:35:08 crc kubenswrapper[4702]: E1125 10:35:08.877731 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-2qpnn" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" Nov 25 10:35:09 crc kubenswrapper[4702]: I1125 10:35:09.883740 4702 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-multus/network-metrics-daemon-fnlmg" event={"ID":"c8f0f344-2768-49e0-a344-81f5b457b671","Type":"ContainerStarted","Data":"49b24a920311f51f8b9579e24064fba1600088c25647c41c52c71a8d6f9ffdb5"} Nov 25 10:35:09 crc kubenswrapper[4702]: I1125 10:35:09.884214 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:35:09 crc kubenswrapper[4702]: I1125 10:35:09.884291 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:35:09 crc kubenswrapper[4702]: I1125 10:35:09.884320 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:35:10 crc kubenswrapper[4702]: I1125 10:35:10.889214 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:35:10 crc kubenswrapper[4702]: I1125 10:35:10.889599 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:35:11 crc kubenswrapper[4702]: E1125 10:35:11.398489 4702 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Nov 25 10:35:11 crc kubenswrapper[4702]: E1125 10:35:11.398631 4702 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-mr5kd,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-rlsjf_openshift-marketplace(78d54279-d896-4edd-b53b-c0458d7fddf6): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Nov 25 10:35:11 crc kubenswrapper[4702]: E1125 10:35:11.399859 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-rlsjf" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" Nov 25 10:35:11 crc kubenswrapper[4702]: I1125 10:35:11.895838 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-fnlmg" event={"ID":"c8f0f344-2768-49e0-a344-81f5b457b671","Type":"ContainerStarted","Data":"0c0fde99b1707215d4cef10c228e51e81a86435017f50c6e296c54935a39c5dc"} Nov 25 10:35:11 crc kubenswrapper[4702]: E1125 10:35:11.898272 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-rlsjf" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" Nov 25 10:35:12 crc kubenswrapper[4702]: I1125 10:35:12.925081 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-fnlmg" podStartSLOduration=189.925062243 podStartE2EDuration="3m9.925062243s" podCreationTimestamp="2025-11-25 10:32:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:35:12.920824542 +0000 UTC m=+210.287420251" watchObservedRunningTime="2025-11-25 10:35:12.925062243 +0000 UTC m=+210.291657932" Nov 25 10:35:13 crc kubenswrapper[4702]: I1125 10:35:13.590755 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:35:13 crc kubenswrapper[4702]: I1125 10:35:13.591133 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:35:13 crc kubenswrapper[4702]: I1125 10:35:13.591372 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:35:13 crc kubenswrapper[4702]: I1125 10:35:13.594915 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 10:35:13 crc kubenswrapper[4702]: I1125 10:35:13.595228 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7" gracePeriod=600 Nov 25 10:35:14 crc kubenswrapper[4702]: I1125 10:35:14.697393 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:35:14 crc kubenswrapper[4702]: I1125 10:35:14.697460 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:35:14 crc kubenswrapper[4702]: I1125 10:35:14.697471 4702 patch_prober.go:28] interesting pod/downloads-7954f5f757-2r4cg container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" start-of-body= Nov 25 10:35:14 crc kubenswrapper[4702]: I1125 10:35:14.697503 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-2r4cg" podUID="a8621fa2-6cb1-4e0e-b1ed-3f254430262b" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.12:8080/\": dial tcp 10.217.0.12:8080: connect: connection refused" Nov 25 10:35:14 crc kubenswrapper[4702]: I1125 10:35:14.914348 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7" exitCode=0 Nov 25 10:35:14 crc kubenswrapper[4702]: I1125 10:35:14.914405 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7"} Nov 25 10:35:24 crc kubenswrapper[4702]: I1125 10:35:24.713968 4702 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-2r4cg" Nov 25 10:36:44 crc kubenswrapper[4702]: I1125 10:36:44.073260 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"3710cfa79b28ce8750da845c642c667802a6fa3de7d52c73daec57f071ae2a10"} Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.096668 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dt97x" event={"ID":"5df05e89-c694-4234-b4fe-669de4c1dec5","Type":"ContainerStarted","Data":"b6aa89bd0d6e9abe49c4c7b746cfb610ed3472c56193f361940d9a24d4cfeef5"} Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.099125 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7q85" event={"ID":"a9ba63f3-8505-42ff-8804-47823d199524","Type":"ContainerStarted","Data":"c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7"} Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.100732 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qhgc" event={"ID":"4150132d-8f23-48d9-9635-0701e221acbd","Type":"ContainerStarted","Data":"874178ba7ac518f1bac0f3978e68de15aadcc1660e173364fdaa9783b06edb7d"} Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.102435 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qpnn" event={"ID":"2c770958-ad07-45e3-8793-16a1e66a0aca","Type":"ContainerStarted","Data":"0952bcbb09a8e5a3cc29f48b979545b94439e0ab9c5a31f3ea6c46a67cfe895f"} Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.104128 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlsjf" event={"ID":"78d54279-d896-4edd-b53b-c0458d7fddf6","Type":"ContainerStarted","Data":"b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02"} Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.106207 4702 generic.go:334] "Generic (PLEG): container finished" podID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerID="aff16879c431bfc8c3f93f7a4f3a1f1db42d1276f87c954fbb2d43a91afd0be2" exitCode=0 Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.106368 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rt925" event={"ID":"eb941106-0eca-47c9-82d6-ac91a36f1366","Type":"ContainerDied","Data":"aff16879c431bfc8c3f93f7a4f3a1f1db42d1276f87c954fbb2d43a91afd0be2"} Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.110791 4702 generic.go:334] "Generic (PLEG): container finished" podID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerID="6842242450c04469c372ca47bbd6e28653934ea354cd460f125107ad2caaffce" exitCode=0 Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.110847 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rzrq9" event={"ID":"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f","Type":"ContainerDied","Data":"6842242450c04469c372ca47bbd6e28653934ea354cd460f125107ad2caaffce"} Nov 25 10:36:46 crc kubenswrapper[4702]: I1125 10:36:46.118766 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vv8tj" event={"ID":"58eeaa3d-0858-43f1-a047-52775d340bc0","Type":"ContainerStarted","Data":"76cdbcf43cf39c2a93cb50363af3c958101e2ecf81845d249ea7bcf10fc7c367"} Nov 25 
10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.126046 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rzrq9" event={"ID":"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f","Type":"ContainerStarted","Data":"48e713dd448ab12e39fc149c17800d03eca2eb1d9e8c75c8c16dcefe9bd7220c"} Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.128759 4702 generic.go:334] "Generic (PLEG): container finished" podID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerID="76cdbcf43cf39c2a93cb50363af3c958101e2ecf81845d249ea7bcf10fc7c367" exitCode=0 Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.128804 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vv8tj" event={"ID":"58eeaa3d-0858-43f1-a047-52775d340bc0","Type":"ContainerDied","Data":"76cdbcf43cf39c2a93cb50363af3c958101e2ecf81845d249ea7bcf10fc7c367"} Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.131549 4702 generic.go:334] "Generic (PLEG): container finished" podID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerID="b6aa89bd0d6e9abe49c4c7b746cfb610ed3472c56193f361940d9a24d4cfeef5" exitCode=0 Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.131640 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dt97x" event={"ID":"5df05e89-c694-4234-b4fe-669de4c1dec5","Type":"ContainerDied","Data":"b6aa89bd0d6e9abe49c4c7b746cfb610ed3472c56193f361940d9a24d4cfeef5"} Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.136013 4702 generic.go:334] "Generic (PLEG): container finished" podID="a9ba63f3-8505-42ff-8804-47823d199524" containerID="c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7" exitCode=0 Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.136077 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7q85" event={"ID":"a9ba63f3-8505-42ff-8804-47823d199524","Type":"ContainerDied","Data":"c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7"} Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.138550 4702 generic.go:334] "Generic (PLEG): container finished" podID="4150132d-8f23-48d9-9635-0701e221acbd" containerID="874178ba7ac518f1bac0f3978e68de15aadcc1660e173364fdaa9783b06edb7d" exitCode=0 Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.138587 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qhgc" event={"ID":"4150132d-8f23-48d9-9635-0701e221acbd","Type":"ContainerDied","Data":"874178ba7ac518f1bac0f3978e68de15aadcc1660e173364fdaa9783b06edb7d"} Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.140682 4702 generic.go:334] "Generic (PLEG): container finished" podID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerID="0952bcbb09a8e5a3cc29f48b979545b94439e0ab9c5a31f3ea6c46a67cfe895f" exitCode=0 Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.140769 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qpnn" event={"ID":"2c770958-ad07-45e3-8793-16a1e66a0aca","Type":"ContainerDied","Data":"0952bcbb09a8e5a3cc29f48b979545b94439e0ab9c5a31f3ea6c46a67cfe895f"} Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.151428 4702 generic.go:334] "Generic (PLEG): container finished" podID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerID="b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02" exitCode=0 Nov 25 10:36:47 crc kubenswrapper[4702]: I1125 10:36:47.151482 4702 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlsjf" event={"ID":"78d54279-d896-4edd-b53b-c0458d7fddf6","Type":"ContainerDied","Data":"b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02"} Nov 25 10:36:48 crc kubenswrapper[4702]: I1125 10:36:48.159676 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rt925" event={"ID":"eb941106-0eca-47c9-82d6-ac91a36f1366","Type":"ContainerStarted","Data":"f65b01b4815ae3b5af05fc1d659964781a0ac08f796cd83024abba3adac09b29"} Nov 25 10:36:48 crc kubenswrapper[4702]: I1125 10:36:48.178413 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-rzrq9" podStartSLOduration=2.745707262 podStartE2EDuration="2m29.178391954s" podCreationTimestamp="2025-11-25 10:34:19 +0000 UTC" firstStartedPulling="2025-11-25 10:34:20.510034127 +0000 UTC m=+157.876629816" lastFinishedPulling="2025-11-25 10:36:46.942718819 +0000 UTC m=+304.309314508" observedRunningTime="2025-11-25 10:36:48.177529228 +0000 UTC m=+305.544124927" watchObservedRunningTime="2025-11-25 10:36:48.178391954 +0000 UTC m=+305.544987643" Nov 25 10:36:48 crc kubenswrapper[4702]: I1125 10:36:48.195754 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-rt925" podStartSLOduration=4.463831984 podStartE2EDuration="2m31.195735216s" podCreationTimestamp="2025-11-25 10:34:17 +0000 UTC" firstStartedPulling="2025-11-25 10:34:20.480494894 +0000 UTC m=+157.847090583" lastFinishedPulling="2025-11-25 10:36:47.212398126 +0000 UTC m=+304.578993815" observedRunningTime="2025-11-25 10:36:48.19120924 +0000 UTC m=+305.557804939" watchObservedRunningTime="2025-11-25 10:36:48.195735216 +0000 UTC m=+305.562330895" Nov 25 10:36:49 crc kubenswrapper[4702]: I1125 10:36:49.168151 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dt97x" event={"ID":"5df05e89-c694-4234-b4fe-669de4c1dec5","Type":"ContainerStarted","Data":"159c34f483a52a5a47ee55791dd68c1799ffd9b98e6b6cbc19a775e736f9ede6"} Nov 25 10:36:49 crc kubenswrapper[4702]: I1125 10:36:49.880190 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-rzrq9" Nov 25 10:36:49 crc kubenswrapper[4702]: I1125 10:36:49.880244 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-rzrq9" Nov 25 10:36:50 crc kubenswrapper[4702]: I1125 10:36:50.196341 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-dt97x" podStartSLOduration=4.036716934 podStartE2EDuration="2m33.196305283s" podCreationTimestamp="2025-11-25 10:34:17 +0000 UTC" firstStartedPulling="2025-11-25 10:34:18.650075541 +0000 UTC m=+156.016671230" lastFinishedPulling="2025-11-25 10:36:47.80966389 +0000 UTC m=+305.176259579" observedRunningTime="2025-11-25 10:36:50.19453113 +0000 UTC m=+307.561126819" watchObservedRunningTime="2025-11-25 10:36:50.196305283 +0000 UTC m=+307.562900972" Nov 25 10:36:51 crc kubenswrapper[4702]: I1125 10:36:51.339470 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-rzrq9" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="registry-server" probeResult="failure" output=< Nov 25 10:36:51 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s Nov 25 10:36:51 crc 
kubenswrapper[4702]: > Nov 25 10:36:56 crc kubenswrapper[4702]: I1125 10:36:56.214129 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qhgc" event={"ID":"4150132d-8f23-48d9-9635-0701e221acbd","Type":"ContainerStarted","Data":"9e9001f2c1c9fda03a82ebe85dbb0781487f1da8430e2d52c5bf084400ca1445"} Nov 25 10:36:57 crc kubenswrapper[4702]: I1125 10:36:57.240961 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-4qhgc" podStartSLOduration=7.507674629 podStartE2EDuration="2m38.240944467s" podCreationTimestamp="2025-11-25 10:34:19 +0000 UTC" firstStartedPulling="2025-11-25 10:34:21.568976599 +0000 UTC m=+158.935572288" lastFinishedPulling="2025-11-25 10:36:52.302246437 +0000 UTC m=+309.668842126" observedRunningTime="2025-11-25 10:36:57.235755761 +0000 UTC m=+314.602351470" watchObservedRunningTime="2025-11-25 10:36:57.240944467 +0000 UTC m=+314.607540156" Nov 25 10:36:57 crc kubenswrapper[4702]: I1125 10:36:57.807356 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:36:57 crc kubenswrapper[4702]: I1125 10:36:57.807400 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:36:58 crc kubenswrapper[4702]: I1125 10:36:58.012868 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-rt925" Nov 25 10:36:58 crc kubenswrapper[4702]: I1125 10:36:58.013228 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-rt925" Nov 25 10:36:58 crc kubenswrapper[4702]: I1125 10:36:58.316108 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:36:58 crc kubenswrapper[4702]: I1125 10:36:58.317136 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-rt925" Nov 25 10:36:58 crc kubenswrapper[4702]: I1125 10:36:58.362945 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:36:58 crc kubenswrapper[4702]: I1125 10:36:58.372054 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-rt925" Nov 25 10:36:59 crc kubenswrapper[4702]: I1125 10:36:59.919548 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-rzrq9" Nov 25 10:36:59 crc kubenswrapper[4702]: I1125 10:36:59.963320 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-rzrq9" Nov 25 10:37:00 crc kubenswrapper[4702]: I1125 10:37:00.232525 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-4qhgc" Nov 25 10:37:00 crc kubenswrapper[4702]: I1125 10:37:00.232582 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-4qhgc" Nov 25 10:37:00 crc kubenswrapper[4702]: I1125 10:37:00.266727 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-4qhgc" Nov 25 10:37:00 crc kubenswrapper[4702]: I1125 10:37:00.348080 4702 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-4qhgc" Nov 25 10:37:00 crc kubenswrapper[4702]: I1125 10:37:00.754239 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rt925"] Nov 25 10:37:00 crc kubenswrapper[4702]: I1125 10:37:00.754599 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-rt925" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerName="registry-server" containerID="cri-o://f65b01b4815ae3b5af05fc1d659964781a0ac08f796cd83024abba3adac09b29" gracePeriod=2 Nov 25 10:37:02 crc kubenswrapper[4702]: I1125 10:37:02.251439 4702 generic.go:334] "Generic (PLEG): container finished" podID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerID="f65b01b4815ae3b5af05fc1d659964781a0ac08f796cd83024abba3adac09b29" exitCode=0 Nov 25 10:37:02 crc kubenswrapper[4702]: I1125 10:37:02.251480 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rt925" event={"ID":"eb941106-0eca-47c9-82d6-ac91a36f1366","Type":"ContainerDied","Data":"f65b01b4815ae3b5af05fc1d659964781a0ac08f796cd83024abba3adac09b29"} Nov 25 10:37:02 crc kubenswrapper[4702]: I1125 10:37:02.550228 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qhgc"] Nov 25 10:37:02 crc kubenswrapper[4702]: I1125 10:37:02.550553 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-4qhgc" podUID="4150132d-8f23-48d9-9635-0701e221acbd" containerName="registry-server" containerID="cri-o://9e9001f2c1c9fda03a82ebe85dbb0781487f1da8430e2d52c5bf084400ca1445" gracePeriod=2 Nov 25 10:37:04 crc kubenswrapper[4702]: I1125 10:37:04.263205 4702 generic.go:334] "Generic (PLEG): container finished" podID="4150132d-8f23-48d9-9635-0701e221acbd" containerID="9e9001f2c1c9fda03a82ebe85dbb0781487f1da8430e2d52c5bf084400ca1445" exitCode=0 Nov 25 10:37:04 crc kubenswrapper[4702]: I1125 10:37:04.263272 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qhgc" event={"ID":"4150132d-8f23-48d9-9635-0701e221acbd","Type":"ContainerDied","Data":"9e9001f2c1c9fda03a82ebe85dbb0781487f1da8430e2d52c5bf084400ca1445"} Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.026833 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-rt925" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.106328 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lm98d\" (UniqueName: \"kubernetes.io/projected/eb941106-0eca-47c9-82d6-ac91a36f1366-kube-api-access-lm98d\") pod \"eb941106-0eca-47c9-82d6-ac91a36f1366\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.106431 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-utilities\") pod \"eb941106-0eca-47c9-82d6-ac91a36f1366\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.106535 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-catalog-content\") pod \"eb941106-0eca-47c9-82d6-ac91a36f1366\" (UID: \"eb941106-0eca-47c9-82d6-ac91a36f1366\") " Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.107738 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-utilities" (OuterVolumeSpecName: "utilities") pod "eb941106-0eca-47c9-82d6-ac91a36f1366" (UID: "eb941106-0eca-47c9-82d6-ac91a36f1366"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.113779 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eb941106-0eca-47c9-82d6-ac91a36f1366-kube-api-access-lm98d" (OuterVolumeSpecName: "kube-api-access-lm98d") pod "eb941106-0eca-47c9-82d6-ac91a36f1366" (UID: "eb941106-0eca-47c9-82d6-ac91a36f1366"). InnerVolumeSpecName "kube-api-access-lm98d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.162714 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "eb941106-0eca-47c9-82d6-ac91a36f1366" (UID: "eb941106-0eca-47c9-82d6-ac91a36f1366"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.208211 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lm98d\" (UniqueName: \"kubernetes.io/projected/eb941106-0eca-47c9-82d6-ac91a36f1366-kube-api-access-lm98d\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.208255 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.208269 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/eb941106-0eca-47c9-82d6-ac91a36f1366-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.271246 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-rt925" event={"ID":"eb941106-0eca-47c9-82d6-ac91a36f1366","Type":"ContainerDied","Data":"0c8603b075ef1b4749e5703710a04d4d56986e412df7e2647a915269c881df86"} Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.271280 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-rt925" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.271318 4702 scope.go:117] "RemoveContainer" containerID="f65b01b4815ae3b5af05fc1d659964781a0ac08f796cd83024abba3adac09b29" Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.302724 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-rt925"] Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.305684 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-rt925"] Nov 25 10:37:05 crc kubenswrapper[4702]: I1125 10:37:05.411170 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" path="/var/lib/kubelet/pods/eb941106-0eca-47c9-82d6-ac91a36f1366/volumes" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.058431 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qhgc" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.152969 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qf7zl\" (UniqueName: \"kubernetes.io/projected/4150132d-8f23-48d9-9635-0701e221acbd-kube-api-access-qf7zl\") pod \"4150132d-8f23-48d9-9635-0701e221acbd\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.153019 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-utilities\") pod \"4150132d-8f23-48d9-9635-0701e221acbd\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.153102 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-catalog-content\") pod \"4150132d-8f23-48d9-9635-0701e221acbd\" (UID: \"4150132d-8f23-48d9-9635-0701e221acbd\") " Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.153831 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-utilities" (OuterVolumeSpecName: "utilities") pod "4150132d-8f23-48d9-9635-0701e221acbd" (UID: "4150132d-8f23-48d9-9635-0701e221acbd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.164189 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4150132d-8f23-48d9-9635-0701e221acbd-kube-api-access-qf7zl" (OuterVolumeSpecName: "kube-api-access-qf7zl") pod "4150132d-8f23-48d9-9635-0701e221acbd" (UID: "4150132d-8f23-48d9-9635-0701e221acbd"). InnerVolumeSpecName "kube-api-access-qf7zl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.172335 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4150132d-8f23-48d9-9635-0701e221acbd" (UID: "4150132d-8f23-48d9-9635-0701e221acbd"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.254726 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qf7zl\" (UniqueName: \"kubernetes.io/projected/4150132d-8f23-48d9-9635-0701e221acbd-kube-api-access-qf7zl\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.254766 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.254776 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4150132d-8f23-48d9-9635-0701e221acbd-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.299975 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-4qhgc" event={"ID":"4150132d-8f23-48d9-9635-0701e221acbd","Type":"ContainerDied","Data":"db1d7451cc5ce18c586064fed7c009dcd6fa4273e8478671122ab4508b91b296"} Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.300066 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-4qhgc" Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.359175 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qhgc"] Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.365867 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-4qhgc"] Nov 25 10:37:09 crc kubenswrapper[4702]: I1125 10:37:09.409629 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4150132d-8f23-48d9-9635-0701e221acbd" path="/var/lib/kubelet/pods/4150132d-8f23-48d9-9635-0701e221acbd/volumes" Nov 25 10:37:10 crc kubenswrapper[4702]: I1125 10:37:10.111241 4702 scope.go:117] "RemoveContainer" containerID="aff16879c431bfc8c3f93f7a4f3a1f1db42d1276f87c954fbb2d43a91afd0be2" Nov 25 10:37:12 crc kubenswrapper[4702]: I1125 10:37:12.768974 4702 scope.go:117] "RemoveContainer" containerID="73594cb5e83d99d91eb7cc2e179c7f976a8d8055ad8a6eb54536b1a125ecfad8" Nov 25 10:37:12 crc kubenswrapper[4702]: I1125 10:37:12.793503 4702 scope.go:117] "RemoveContainer" containerID="9e9001f2c1c9fda03a82ebe85dbb0781487f1da8430e2d52c5bf084400ca1445" Nov 25 10:37:12 crc kubenswrapper[4702]: I1125 10:37:12.811985 4702 scope.go:117] "RemoveContainer" containerID="874178ba7ac518f1bac0f3978e68de15aadcc1660e173364fdaa9783b06edb7d" Nov 25 10:37:12 crc kubenswrapper[4702]: I1125 10:37:12.887703 4702 scope.go:117] "RemoveContainer" containerID="b25e7448d700ee3b7d6853d3c3eb5541104333a5bd61e01bf3df6e3c403af1f4" Nov 25 10:37:14 crc kubenswrapper[4702]: I1125 10:37:14.334966 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vv8tj" event={"ID":"58eeaa3d-0858-43f1-a047-52775d340bc0","Type":"ContainerStarted","Data":"d3bc0145bb3d1eaddd258d91949f55399e3c0cc7a400fb0a0bd88956b743627c"} Nov 25 10:37:14 crc kubenswrapper[4702]: I1125 10:37:14.337484 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7q85" event={"ID":"a9ba63f3-8505-42ff-8804-47823d199524","Type":"ContainerStarted","Data":"dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298"} Nov 25 10:37:14 crc 
kubenswrapper[4702]: I1125 10:37:14.339797 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qpnn" event={"ID":"2c770958-ad07-45e3-8793-16a1e66a0aca","Type":"ContainerStarted","Data":"2723f615615f8604f6cc5163ee2998330b109f2534b129206c76499d78d1c436"} Nov 25 10:37:14 crc kubenswrapper[4702]: I1125 10:37:14.342206 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlsjf" event={"ID":"78d54279-d896-4edd-b53b-c0458d7fddf6","Type":"ContainerStarted","Data":"f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf"} Nov 25 10:37:14 crc kubenswrapper[4702]: I1125 10:37:14.350697 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-vv8tj" podStartSLOduration=4.006962677 podStartE2EDuration="2m57.350679543s" podCreationTimestamp="2025-11-25 10:34:17 +0000 UTC" firstStartedPulling="2025-11-25 10:34:19.425231386 +0000 UTC m=+156.791827075" lastFinishedPulling="2025-11-25 10:37:12.768948252 +0000 UTC m=+330.135543941" observedRunningTime="2025-11-25 10:37:14.349546951 +0000 UTC m=+331.716142660" watchObservedRunningTime="2025-11-25 10:37:14.350679543 +0000 UTC m=+331.717275232" Nov 25 10:37:14 crc kubenswrapper[4702]: I1125 10:37:14.366610 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rlsjf" podStartSLOduration=6.177527432 podStartE2EDuration="2m54.366590122s" podCreationTimestamp="2025-11-25 10:34:20 +0000 UTC" firstStartedPulling="2025-11-25 10:34:22.588608835 +0000 UTC m=+159.955204524" lastFinishedPulling="2025-11-25 10:37:10.777671485 +0000 UTC m=+328.144267214" observedRunningTime="2025-11-25 10:37:14.364197253 +0000 UTC m=+331.730792942" watchObservedRunningTime="2025-11-25 10:37:14.366590122 +0000 UTC m=+331.733185811" Nov 25 10:37:14 crc kubenswrapper[4702]: I1125 10:37:14.384741 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-2qpnn" podStartSLOduration=8.81206327 podStartE2EDuration="2m57.384720564s" podCreationTimestamp="2025-11-25 10:34:17 +0000 UTC" firstStartedPulling="2025-11-25 10:34:20.406049165 +0000 UTC m=+157.772644854" lastFinishedPulling="2025-11-25 10:37:08.978706459 +0000 UTC m=+326.345302148" observedRunningTime="2025-11-25 10:37:14.382325295 +0000 UTC m=+331.748921004" watchObservedRunningTime="2025-11-25 10:37:14.384720564 +0000 UTC m=+331.751316263" Nov 25 10:37:17 crc kubenswrapper[4702]: I1125 10:37:17.603543 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:37:17 crc kubenswrapper[4702]: I1125 10:37:17.604170 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:37:17 crc kubenswrapper[4702]: I1125 10:37:17.648757 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:37:17 crc kubenswrapper[4702]: I1125 10:37:17.668765 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-q7q85" podStartSLOduration=7.4834552 podStartE2EDuration="2m57.66874738s" podCreationTimestamp="2025-11-25 10:34:20 +0000 UTC" firstStartedPulling="2025-11-25 10:34:22.583635911 +0000 UTC m=+159.950231600" lastFinishedPulling="2025-11-25 10:37:12.768928071 +0000 UTC 
m=+330.135523780" observedRunningTime="2025-11-25 10:37:14.402784744 +0000 UTC m=+331.769380433" watchObservedRunningTime="2025-11-25 10:37:17.66874738 +0000 UTC m=+335.035343069" Nov 25 10:37:18 crc kubenswrapper[4702]: I1125 10:37:18.219176 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-2qpnn" Nov 25 10:37:18 crc kubenswrapper[4702]: I1125 10:37:18.219243 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-2qpnn" Nov 25 10:37:18 crc kubenswrapper[4702]: I1125 10:37:18.258822 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-2qpnn" Nov 25 10:37:18 crc kubenswrapper[4702]: I1125 10:37:18.408646 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:37:18 crc kubenswrapper[4702]: I1125 10:37:18.411621 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-2qpnn" Nov 25 10:37:20 crc kubenswrapper[4702]: I1125 10:37:20.569385 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2qpnn"] Nov 25 10:37:20 crc kubenswrapper[4702]: I1125 10:37:20.571272 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-2qpnn" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerName="registry-server" containerID="cri-o://2723f615615f8604f6cc5163ee2998330b109f2534b129206c76499d78d1c436" gracePeriod=2 Nov 25 10:37:20 crc kubenswrapper[4702]: I1125 10:37:20.846971 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:37:20 crc kubenswrapper[4702]: I1125 10:37:20.847339 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:37:20 crc kubenswrapper[4702]: I1125 10:37:20.889112 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.268255 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.268297 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.308352 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.388629 4702 generic.go:334] "Generic (PLEG): container finished" podID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerID="2723f615615f8604f6cc5163ee2998330b109f2534b129206c76499d78d1c436" exitCode=0 Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.389389 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qpnn" event={"ID":"2c770958-ad07-45e3-8793-16a1e66a0aca","Type":"ContainerDied","Data":"2723f615615f8604f6cc5163ee2998330b109f2534b129206c76499d78d1c436"} Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.389423 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-2qpnn" 
event={"ID":"2c770958-ad07-45e3-8793-16a1e66a0aca","Type":"ContainerDied","Data":"3e60a406f644f96b8c557dc5f6adb6ac37f91614ef434b47b444a96ace80d6ae"} Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.389434 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3e60a406f644f96b8c557dc5f6adb6ac37f91614ef434b47b444a96ace80d6ae" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.406654 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2qpnn" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.426185 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-catalog-content\") pod \"2c770958-ad07-45e3-8793-16a1e66a0aca\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.426244 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ndmcp\" (UniqueName: \"kubernetes.io/projected/2c770958-ad07-45e3-8793-16a1e66a0aca-kube-api-access-ndmcp\") pod \"2c770958-ad07-45e3-8793-16a1e66a0aca\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.426271 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-utilities\") pod \"2c770958-ad07-45e3-8793-16a1e66a0aca\" (UID: \"2c770958-ad07-45e3-8793-16a1e66a0aca\") " Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.427545 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-utilities" (OuterVolumeSpecName: "utilities") pod "2c770958-ad07-45e3-8793-16a1e66a0aca" (UID: "2c770958-ad07-45e3-8793-16a1e66a0aca"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.429267 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.442659 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.447124 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2c770958-ad07-45e3-8793-16a1e66a0aca-kube-api-access-ndmcp" (OuterVolumeSpecName: "kube-api-access-ndmcp") pod "2c770958-ad07-45e3-8793-16a1e66a0aca" (UID: "2c770958-ad07-45e3-8793-16a1e66a0aca"). InnerVolumeSpecName "kube-api-access-ndmcp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.493673 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2c770958-ad07-45e3-8793-16a1e66a0aca" (UID: "2c770958-ad07-45e3-8793-16a1e66a0aca"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.527094 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.527147 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ndmcp\" (UniqueName: \"kubernetes.io/projected/2c770958-ad07-45e3-8793-16a1e66a0aca-kube-api-access-ndmcp\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:21 crc kubenswrapper[4702]: I1125 10:37:21.527160 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2c770958-ad07-45e3-8793-16a1e66a0aca-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:22 crc kubenswrapper[4702]: I1125 10:37:22.392783 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-2qpnn" Nov 25 10:37:22 crc kubenswrapper[4702]: I1125 10:37:22.418468 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-2qpnn"] Nov 25 10:37:22 crc kubenswrapper[4702]: I1125 10:37:22.421498 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-2qpnn"] Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.167672 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rlsjf"] Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.403980 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rlsjf" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerName="registry-server" containerID="cri-o://f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf" gracePeriod=2 Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.409361 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" path="/var/lib/kubelet/pods/2c770958-ad07-45e3-8793-16a1e66a0aca/volumes" Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.732820 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.754517 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mr5kd\" (UniqueName: \"kubernetes.io/projected/78d54279-d896-4edd-b53b-c0458d7fddf6-kube-api-access-mr5kd\") pod \"78d54279-d896-4edd-b53b-c0458d7fddf6\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.754626 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-catalog-content\") pod \"78d54279-d896-4edd-b53b-c0458d7fddf6\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.754783 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-utilities\") pod \"78d54279-d896-4edd-b53b-c0458d7fddf6\" (UID: \"78d54279-d896-4edd-b53b-c0458d7fddf6\") " Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.756323 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-utilities" (OuterVolumeSpecName: "utilities") pod "78d54279-d896-4edd-b53b-c0458d7fddf6" (UID: "78d54279-d896-4edd-b53b-c0458d7fddf6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.761901 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78d54279-d896-4edd-b53b-c0458d7fddf6-kube-api-access-mr5kd" (OuterVolumeSpecName: "kube-api-access-mr5kd") pod "78d54279-d896-4edd-b53b-c0458d7fddf6" (UID: "78d54279-d896-4edd-b53b-c0458d7fddf6"). InnerVolumeSpecName "kube-api-access-mr5kd". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.843197 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "78d54279-d896-4edd-b53b-c0458d7fddf6" (UID: "78d54279-d896-4edd-b53b-c0458d7fddf6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.855694 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.855726 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mr5kd\" (UniqueName: \"kubernetes.io/projected/78d54279-d896-4edd-b53b-c0458d7fddf6-kube-api-access-mr5kd\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:23 crc kubenswrapper[4702]: I1125 10:37:23.855736 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/78d54279-d896-4edd-b53b-c0458d7fddf6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.409675 4702 generic.go:334] "Generic (PLEG): container finished" podID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerID="f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf" exitCode=0 Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.409728 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlsjf" event={"ID":"78d54279-d896-4edd-b53b-c0458d7fddf6","Type":"ContainerDied","Data":"f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf"} Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.409755 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rlsjf" event={"ID":"78d54279-d896-4edd-b53b-c0458d7fddf6","Type":"ContainerDied","Data":"726481da5ea28536dfeada63e4657434e1ad37e6aea3a3a7e125a14d9d22e18c"} Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.409773 4702 scope.go:117] "RemoveContainer" containerID="f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.409919 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-rlsjf" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.434876 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rlsjf"] Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.437993 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rlsjf"] Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.438787 4702 scope.go:117] "RemoveContainer" containerID="b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.478882 4702 scope.go:117] "RemoveContainer" containerID="7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.493373 4702 scope.go:117] "RemoveContainer" containerID="f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf" Nov 25 10:37:24 crc kubenswrapper[4702]: E1125 10:37:24.493992 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf\": container with ID starting with f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf not found: ID does not exist" containerID="f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.494030 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf"} err="failed to get container status \"f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf\": rpc error: code = NotFound desc = could not find container \"f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf\": container with ID starting with f62bee14a8ab6d79f485be211d627c7a799ddddc12c305b7bd5142785ee926bf not found: ID does not exist" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.494050 4702 scope.go:117] "RemoveContainer" containerID="b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02" Nov 25 10:37:24 crc kubenswrapper[4702]: E1125 10:37:24.494277 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02\": container with ID starting with b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02 not found: ID does not exist" containerID="b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.494310 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02"} err="failed to get container status \"b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02\": rpc error: code = NotFound desc = could not find container \"b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02\": container with ID starting with b14159559f6a85e3c2cea80a67a542aa9ce9bb5ca0109f98827098023df5ed02 not found: ID does not exist" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.494328 4702 scope.go:117] "RemoveContainer" containerID="7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa" Nov 25 10:37:24 crc kubenswrapper[4702]: E1125 10:37:24.494696 4702 log.go:32] "ContainerStatus from runtime service failed" 
err="rpc error: code = NotFound desc = could not find container \"7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa\": container with ID starting with 7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa not found: ID does not exist" containerID="7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa" Nov 25 10:37:24 crc kubenswrapper[4702]: I1125 10:37:24.494727 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa"} err="failed to get container status \"7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa\": rpc error: code = NotFound desc = could not find container \"7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa\": container with ID starting with 7319a4cb02c006cf86aaabe3d30a4bdb70e19c433a0a0e80df804c49d8bcf6aa not found: ID does not exist" Nov 25 10:37:25 crc kubenswrapper[4702]: I1125 10:37:25.408583 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" path="/var/lib/kubelet/pods/78d54279-d896-4edd-b53b-c0458d7fddf6/volumes" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.010927 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vv8tj"] Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.013692 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-vv8tj" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerName="registry-server" containerID="cri-o://d3bc0145bb3d1eaddd258d91949f55399e3c0cc7a400fb0a0bd88956b743627c" gracePeriod=30 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.028094 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dt97x"] Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.028716 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-dt97x" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerName="registry-server" containerID="cri-o://159c34f483a52a5a47ee55791dd68c1799ffd9b98e6b6cbc19a775e736f9ede6" gracePeriod=30 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.030736 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d5g4l"] Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.031054 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" containerID="cri-o://1dca76e51b4e63136c0843f805f41bf1b2d51d0f79642cfdb03eaddd508a3807" gracePeriod=30 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.041327 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rzrq9"] Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.041634 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-rzrq9" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="registry-server" containerID="cri-o://48e713dd448ab12e39fc149c17800d03eca2eb1d9e8c75c8c16dcefe9bd7220c" gracePeriod=30 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.047593 4702 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/marketplace-operator-79b997595-7mlcv"] Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.047819 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerName="extract-content" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058358 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerName="extract-content" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058396 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerName="extract-content" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058406 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerName="extract-content" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058421 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4150132d-8f23-48d9-9635-0701e221acbd" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058429 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4150132d-8f23-48d9-9635-0701e221acbd" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058446 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4150132d-8f23-48d9-9635-0701e221acbd" containerName="extract-utilities" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058455 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4150132d-8f23-48d9-9635-0701e221acbd" containerName="extract-utilities" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058467 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058474 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058495 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1ced20a2-478c-47da-a590-fea4bcbaebd9" containerName="pruner" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058504 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="1ced20a2-478c-47da-a590-fea4bcbaebd9" containerName="pruner" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058514 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerName="extract-utilities" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058521 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerName="extract-utilities" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058531 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerName="extract-content" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058538 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerName="extract-content" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058550 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058558 4702 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058569 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058577 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058588 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerName="extract-utilities" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058595 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerName="extract-utilities" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058607 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4150132d-8f23-48d9-9635-0701e221acbd" containerName="extract-content" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058615 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4150132d-8f23-48d9-9635-0701e221acbd" containerName="extract-content" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.058625 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerName="extract-utilities" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058633 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerName="extract-utilities" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058839 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="2c770958-ad07-45e3-8793-16a1e66a0aca" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058852 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="78d54279-d896-4edd-b53b-c0458d7fddf6" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058861 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="eb941106-0eca-47c9-82d6-ac91a36f1366" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058872 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="4150132d-8f23-48d9-9635-0701e221acbd" containerName="registry-server" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.058883 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="1ced20a2-478c-47da-a590-fea4bcbaebd9" containerName="pruner" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.059348 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q7q85"] Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.059531 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-q7q85" podUID="a9ba63f3-8505-42ff-8804-47823d199524" containerName="registry-server" containerID="cri-o://dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298" gracePeriod=30 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.059354 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.080813 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7mlcv"] Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.168543 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xsjdn\" (UniqueName: \"kubernetes.io/projected/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-kube-api-access-xsjdn\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.168636 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.168658 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.269809 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xsjdn\" (UniqueName: \"kubernetes.io/projected/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-kube-api-access-xsjdn\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.269882 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.269931 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.271396 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.279976 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.299813 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xsjdn\" (UniqueName: \"kubernetes.io/projected/71f4b5c7-e444-4858-aa1d-4c80e32a7e96-kube-api-access-xsjdn\") pod \"marketplace-operator-79b997595-7mlcv\" (UID: \"71f4b5c7-e444-4858-aa1d-4c80e32a7e96\") " pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.389431 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.510201 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.569614 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-7mlcv"] Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.573450 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmfhq\" (UniqueName: \"kubernetes.io/projected/a9ba63f3-8505-42ff-8804-47823d199524-kube-api-access-wmfhq\") pod \"a9ba63f3-8505-42ff-8804-47823d199524\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.573565 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-utilities\") pod \"a9ba63f3-8505-42ff-8804-47823d199524\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.573593 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-catalog-content\") pod \"a9ba63f3-8505-42ff-8804-47823d199524\" (UID: \"a9ba63f3-8505-42ff-8804-47823d199524\") " Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.574442 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-utilities" (OuterVolumeSpecName: "utilities") pod "a9ba63f3-8505-42ff-8804-47823d199524" (UID: "a9ba63f3-8505-42ff-8804-47823d199524"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.575349 4702 generic.go:334] "Generic (PLEG): container finished" podID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerID="1dca76e51b4e63136c0843f805f41bf1b2d51d0f79642cfdb03eaddd508a3807" exitCode=0 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.575406 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" event={"ID":"e22e5523-d9e6-4257-bd76-b216c4bee1be","Type":"ContainerDied","Data":"1dca76e51b4e63136c0843f805f41bf1b2d51d0f79642cfdb03eaddd508a3807"} Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.590958 4702 generic.go:334] "Generic (PLEG): container finished" podID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerID="48e713dd448ab12e39fc149c17800d03eca2eb1d9e8c75c8c16dcefe9bd7220c" exitCode=0 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.591063 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rzrq9" event={"ID":"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f","Type":"ContainerDied","Data":"48e713dd448ab12e39fc149c17800d03eca2eb1d9e8c75c8c16dcefe9bd7220c"} Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.592107 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9ba63f3-8505-42ff-8804-47823d199524-kube-api-access-wmfhq" (OuterVolumeSpecName: "kube-api-access-wmfhq") pod "a9ba63f3-8505-42ff-8804-47823d199524" (UID: "a9ba63f3-8505-42ff-8804-47823d199524"). InnerVolumeSpecName "kube-api-access-wmfhq". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.595324 4702 generic.go:334] "Generic (PLEG): container finished" podID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerID="d3bc0145bb3d1eaddd258d91949f55399e3c0cc7a400fb0a0bd88956b743627c" exitCode=0 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.595389 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vv8tj" event={"ID":"58eeaa3d-0858-43f1-a047-52775d340bc0","Type":"ContainerDied","Data":"d3bc0145bb3d1eaddd258d91949f55399e3c0cc7a400fb0a0bd88956b743627c"} Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.610891 4702 generic.go:334] "Generic (PLEG): container finished" podID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerID="159c34f483a52a5a47ee55791dd68c1799ffd9b98e6b6cbc19a775e736f9ede6" exitCode=0 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.610971 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dt97x" event={"ID":"5df05e89-c694-4234-b4fe-669de4c1dec5","Type":"ContainerDied","Data":"159c34f483a52a5a47ee55791dd68c1799ffd9b98e6b6cbc19a775e736f9ede6"} Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.613116 4702 generic.go:334] "Generic (PLEG): container finished" podID="a9ba63f3-8505-42ff-8804-47823d199524" containerID="dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298" exitCode=0 Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.613137 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-q7q85" event={"ID":"a9ba63f3-8505-42ff-8804-47823d199524","Type":"ContainerDied","Data":"dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298"} Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.613150 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-q7q85" event={"ID":"a9ba63f3-8505-42ff-8804-47823d199524","Type":"ContainerDied","Data":"a78503789b5dc7b17b7348de7d161361b506c4910b55275fbacf48dcaa37855c"} Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.613166 4702 scope.go:117] "RemoveContainer" containerID="dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.613283 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-q7q85" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.630829 4702 scope.go:117] "RemoveContainer" containerID="c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.653513 4702 scope.go:117] "RemoveContainer" containerID="f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.654005 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.686765 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a9ba63f3-8505-42ff-8804-47823d199524" (UID: "a9ba63f3-8505-42ff-8804-47823d199524"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.688121 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-utilities\") pod \"5df05e89-c694-4234-b4fe-669de4c1dec5\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.688196 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j7zgz\" (UniqueName: \"kubernetes.io/projected/5df05e89-c694-4234-b4fe-669de4c1dec5-kube-api-access-j7zgz\") pod \"5df05e89-c694-4234-b4fe-669de4c1dec5\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.688221 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-catalog-content\") pod \"5df05e89-c694-4234-b4fe-669de4c1dec5\" (UID: \"5df05e89-c694-4234-b4fe-669de4c1dec5\") " Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.688427 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmfhq\" (UniqueName: \"kubernetes.io/projected/a9ba63f3-8505-42ff-8804-47823d199524-kube-api-access-wmfhq\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.688445 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.688453 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a9ba63f3-8505-42ff-8804-47823d199524-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.690110 4702 operation_generator.go:803] 
UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-utilities" (OuterVolumeSpecName: "utilities") pod "5df05e89-c694-4234-b4fe-669de4c1dec5" (UID: "5df05e89-c694-4234-b4fe-669de4c1dec5"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.691697 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5df05e89-c694-4234-b4fe-669de4c1dec5-kube-api-access-j7zgz" (OuterVolumeSpecName: "kube-api-access-j7zgz") pod "5df05e89-c694-4234-b4fe-669de4c1dec5" (UID: "5df05e89-c694-4234-b4fe-669de4c1dec5"). InnerVolumeSpecName "kube-api-access-j7zgz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.694378 4702 scope.go:117] "RemoveContainer" containerID="dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.694932 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298\": container with ID starting with dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298 not found: ID does not exist" containerID="dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.694968 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298"} err="failed to get container status \"dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298\": rpc error: code = NotFound desc = could not find container \"dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298\": container with ID starting with dac2258504ddd7fae4e6d913f7bec34dcd59b669f5246a464298fb2b6e6b0298 not found: ID does not exist" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.695013 4702 scope.go:117] "RemoveContainer" containerID="c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.695410 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7\": container with ID starting with c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7 not found: ID does not exist" containerID="c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.695443 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7"} err="failed to get container status \"c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7\": rpc error: code = NotFound desc = could not find container \"c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7\": container with ID starting with c79804f8539dcf774cbc13ffd214f18fbf8d111e6f0b3b0290d87c1f5d244ce7 not found: ID does not exist" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.695465 4702 scope.go:117] "RemoveContainer" containerID="f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87" Nov 25 10:37:56 crc kubenswrapper[4702]: E1125 10:37:56.695703 4702 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87\": container with ID starting with f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87 not found: ID does not exist" containerID="f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.695728 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87"} err="failed to get container status \"f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87\": rpc error: code = NotFound desc = could not find container \"f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87\": container with ID starting with f0700a346d4ee82107d5fa286499676e1626882c898af65bec44d5155c880f87 not found: ID does not exist" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.741267 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5df05e89-c694-4234-b4fe-669de4c1dec5" (UID: "5df05e89-c694-4234-b4fe-669de4c1dec5"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.789499 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.789536 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j7zgz\" (UniqueName: \"kubernetes.io/projected/5df05e89-c694-4234-b4fe-669de4c1dec5-kube-api-access-j7zgz\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.789547 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5df05e89-c694-4234-b4fe-669de4c1dec5-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.990793 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-q7q85"] Nov 25 10:37:56 crc kubenswrapper[4702]: I1125 10:37:56.998655 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-q7q85"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.011368 4702 util.go:48] "No ready sandbox for pod can be found. 
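
Each RemoveContainer above is immediately followed by a ContainerStatus NotFound error and a "DeleteContainer returned error" entry, yet the kubelet carries on: the containers were already gone from CRI-O, so a NotFound while deleting simply means the goal is already met. The usual way to express that idempotency against a gRPC runtime, sketched with a hypothetical client interface:

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// runtimeClient stands in for the CRI runtime service; only the one
// method this sketch needs.
type runtimeClient interface {
	RemoveContainer(id string) error
}

// removeContainer treats gRPC NotFound as success: if the runtime no
// longer knows the ID, the deletion goal is already met.
func removeContainer(rt runtimeClient, id string) error {
	err := rt.RemoveContainer(id)
	if err != nil && status.Code(err) == codes.NotFound {
		fmt.Printf("container %s already gone; nothing to do\n", id)
		return nil
	}
	return err
}

// goneClient always answers NotFound, like the runtime in the log above.
type goneClient struct{}

func (goneClient) RemoveContainer(id string) error {
	return status.Error(codes.NotFound, "could not find container "+id)
}

func main() {
	_ = removeContainer(goneClient{}, "dac2258504dd")
}
```
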
Need to start a new one" pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.095314 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rthpp\" (UniqueName: \"kubernetes.io/projected/58eeaa3d-0858-43f1-a047-52775d340bc0-kube-api-access-rthpp\") pod \"58eeaa3d-0858-43f1-a047-52775d340bc0\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.095392 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-catalog-content\") pod \"58eeaa3d-0858-43f1-a047-52775d340bc0\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.095464 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-utilities\") pod \"58eeaa3d-0858-43f1-a047-52775d340bc0\" (UID: \"58eeaa3d-0858-43f1-a047-52775d340bc0\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.099009 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-utilities" (OuterVolumeSpecName: "utilities") pod "58eeaa3d-0858-43f1-a047-52775d340bc0" (UID: "58eeaa3d-0858-43f1-a047-52775d340bc0"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.100197 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58eeaa3d-0858-43f1-a047-52775d340bc0-kube-api-access-rthpp" (OuterVolumeSpecName: "kube-api-access-rthpp") pod "58eeaa3d-0858-43f1-a047-52775d340bc0" (UID: "58eeaa3d-0858-43f1-a047-52775d340bc0"). InnerVolumeSpecName "kube-api-access-rthpp". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.127799 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rzrq9" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.157554 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "58eeaa3d-0858-43f1-a047-52775d340bc0" (UID: "58eeaa3d-0858-43f1-a047-52775d340bc0"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.187490 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.196656 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-utilities\") pod \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.196715 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7qzb6\" (UniqueName: \"kubernetes.io/projected/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-kube-api-access-7qzb6\") pod \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.196817 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-catalog-content\") pod \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\" (UID: \"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.197088 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rthpp\" (UniqueName: \"kubernetes.io/projected/58eeaa3d-0858-43f1-a047-52775d340bc0-kube-api-access-rthpp\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.197106 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.197118 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/58eeaa3d-0858-43f1-a047-52775d340bc0-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.197447 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-utilities" (OuterVolumeSpecName: "utilities") pod "cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" (UID: "cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.209548 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-kube-api-access-7qzb6" (OuterVolumeSpecName: "kube-api-access-7qzb6") pod "cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" (UID: "cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f"). InnerVolumeSpecName "kube-api-access-7qzb6". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.225295 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" (UID: "cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.297916 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-operator-metrics\") pod \"e22e5523-d9e6-4257-bd76-b216c4bee1be\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.297990 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l9t89\" (UniqueName: \"kubernetes.io/projected/e22e5523-d9e6-4257-bd76-b216c4bee1be-kube-api-access-l9t89\") pod \"e22e5523-d9e6-4257-bd76-b216c4bee1be\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.298067 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-trusted-ca\") pod \"e22e5523-d9e6-4257-bd76-b216c4bee1be\" (UID: \"e22e5523-d9e6-4257-bd76-b216c4bee1be\") " Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.298344 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.298371 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.298382 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7qzb6\" (UniqueName: \"kubernetes.io/projected/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f-kube-api-access-7qzb6\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.298755 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "e22e5523-d9e6-4257-bd76-b216c4bee1be" (UID: "e22e5523-d9e6-4257-bd76-b216c4bee1be"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.301686 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e22e5523-d9e6-4257-bd76-b216c4bee1be-kube-api-access-l9t89" (OuterVolumeSpecName: "kube-api-access-l9t89") pod "e22e5523-d9e6-4257-bd76-b216c4bee1be" (UID: "e22e5523-d9e6-4257-bd76-b216c4bee1be"). InnerVolumeSpecName "kube-api-access-l9t89". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.303300 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "e22e5523-d9e6-4257-bd76-b216c4bee1be" (UID: "e22e5523-d9e6-4257-bd76-b216c4bee1be"). InnerVolumeSpecName "marketplace-operator-metrics". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.399585 4702 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.399623 4702 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/e22e5523-d9e6-4257-bd76-b216c4bee1be-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.399636 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l9t89\" (UniqueName: \"kubernetes.io/projected/e22e5523-d9e6-4257-bd76-b216c4bee1be-kube-api-access-l9t89\") on node \"crc\" DevicePath \"\"" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.408144 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9ba63f3-8505-42ff-8804-47823d199524" path="/var/lib/kubelet/pods/a9ba63f3-8505-42ff-8804-47823d199524/volumes" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.632801 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" event={"ID":"e22e5523-d9e6-4257-bd76-b216c4bee1be","Type":"ContainerDied","Data":"eeb737073517eb8f011a662c4fce9f88a45c7e9e28e2a566b7a4593c27ca45ae"} Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.632864 4702 scope.go:117] "RemoveContainer" containerID="1dca76e51b4e63136c0843f805f41bf1b2d51d0f79642cfdb03eaddd508a3807" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.633000 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-d5g4l" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.647387 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" event={"ID":"71f4b5c7-e444-4858-aa1d-4c80e32a7e96","Type":"ContainerStarted","Data":"2c0040dc2665062131d83f13c1e806f1d958c17eaefca9bf2c8aa804de081853"} Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.647433 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" event={"ID":"71f4b5c7-e444-4858-aa1d-4c80e32a7e96","Type":"ContainerStarted","Data":"03bb70ce853c65a2e4d0e7fb47d16251944937265bc839cef4c5ee7cafc5b528"} Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.649499 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.659677 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.661487 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-rzrq9" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.662437 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-rzrq9" event={"ID":"cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f","Type":"ContainerDied","Data":"058dbe96be77101fb7f5a4c0bcbb1b4f9a9c31567c75cc6de9beeb3aafc1113b"} Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.667946 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-vv8tj" event={"ID":"58eeaa3d-0858-43f1-a047-52775d340bc0","Type":"ContainerDied","Data":"ef78860628f5a45b8c496b80870ce5ed5fe98d943e767ff7ca50f70670b739ba"} Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.667965 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-vv8tj" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.670881 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d5g4l"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.676696 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-dt97x" event={"ID":"5df05e89-c694-4234-b4fe-669de4c1dec5","Type":"ContainerDied","Data":"6fc4f2b3b601304da8eeaef97b3079cdac427b4f2fd91a9d810c823c853bfd24"} Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.677001 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-dt97x" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.678228 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-d5g4l"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.682392 4702 scope.go:117] "RemoveContainer" containerID="48e713dd448ab12e39fc149c17800d03eca2eb1d9e8c75c8c16dcefe9bd7220c" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.690937 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-vv8tj"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.697113 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-vv8tj"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.715087 4702 scope.go:117] "RemoveContainer" containerID="6842242450c04469c372ca47bbd6e28653934ea354cd460f125107ad2caaffce" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.740131 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-7mlcv" podStartSLOduration=1.740108349 podStartE2EDuration="1.740108349s" podCreationTimestamp="2025-11-25 10:37:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:37:57.726224549 +0000 UTC m=+375.092820238" watchObservedRunningTime="2025-11-25 10:37:57.740108349 +0000 UTC m=+375.106704038" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.741233 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-w2kdw"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.750033 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-rzrq9"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.762815 4702 kubelet.go:2431] "SyncLoop REMOVE" 
source="api" pods=["openshift-marketplace/redhat-marketplace-rzrq9"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.772247 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-dt97x"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.776985 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-dt97x"] Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.780167 4702 scope.go:117] "RemoveContainer" containerID="bbdcab74af86b8c5617210050c26414e425b311021b4d106d8365179079a28cb" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.812763 4702 scope.go:117] "RemoveContainer" containerID="d3bc0145bb3d1eaddd258d91949f55399e3c0cc7a400fb0a0bd88956b743627c" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.831325 4702 scope.go:117] "RemoveContainer" containerID="76cdbcf43cf39c2a93cb50363af3c958101e2ecf81845d249ea7bcf10fc7c367" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.847675 4702 scope.go:117] "RemoveContainer" containerID="96c46a903ba35e41877319eb88f906065d3242a7ec9e3ce46d99cefed2d111a8" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.883175 4702 scope.go:117] "RemoveContainer" containerID="159c34f483a52a5a47ee55791dd68c1799ffd9b98e6b6cbc19a775e736f9ede6" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.910008 4702 scope.go:117] "RemoveContainer" containerID="b6aa89bd0d6e9abe49c4c7b746cfb610ed3472c56193f361940d9a24d4cfeef5" Nov 25 10:37:57 crc kubenswrapper[4702]: I1125 10:37:57.938199 4702 scope.go:117] "RemoveContainer" containerID="e74d018441003dd1022892460e4c59f102076042a2a917b7e287acda31cf7db9" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.218806 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-8pk8v"] Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219032 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerName="extract-utilities" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219048 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerName="extract-utilities" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219057 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerName="extract-content" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219063 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerName="extract-content" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219071 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219080 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219092 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerName="extract-content" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219098 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerName="extract-content" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219107 4702 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219114 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219124 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219131 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219138 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219145 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219154 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ba63f3-8505-42ff-8804-47823d199524" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219170 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ba63f3-8505-42ff-8804-47823d199524" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219180 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ba63f3-8505-42ff-8804-47823d199524" containerName="extract-content" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219187 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ba63f3-8505-42ff-8804-47823d199524" containerName="extract-content" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219198 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerName="extract-utilities" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219207 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerName="extract-utilities" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219219 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="extract-utilities" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219225 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="extract-utilities" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219231 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="extract-content" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219237 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="extract-content" Nov 25 10:37:58 crc kubenswrapper[4702]: E1125 10:37:58.219246 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9ba63f3-8505-42ff-8804-47823d199524" containerName="extract-utilities" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219251 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9ba63f3-8505-42ff-8804-47823d199524" containerName="extract-utilities" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219357 4702 
memory_manager.go:354] "RemoveStaleState removing state" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219371 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9ba63f3-8505-42ff-8804-47823d199524" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219381 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" containerName="marketplace-operator" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219393 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.219404 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" containerName="registry-server" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.220144 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.222093 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.229230 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8pk8v"] Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.312339 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0aeef268-5082-47e1-8bc5-7e66f64509e2-utilities\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.312415 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gv855\" (UniqueName: \"kubernetes.io/projected/0aeef268-5082-47e1-8bc5-7e66f64509e2-kube-api-access-gv855\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.312447 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0aeef268-5082-47e1-8bc5-7e66f64509e2-catalog-content\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.415683 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0aeef268-5082-47e1-8bc5-7e66f64509e2-utilities\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.415739 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gv855\" (UniqueName: \"kubernetes.io/projected/0aeef268-5082-47e1-8bc5-7e66f64509e2-kube-api-access-gv855\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" 
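
A few records back, pod_startup_latency_tracker.go:104 reported podStartSLOduration=1.740108349s for the new operator pod: exactly the gap between podCreationTimestamp (10:37:56) and watchObservedRunningTime (10:37:57.740108349), with both pull timestamps left at the zero time because no image pull was recorded. The arithmetic is a plain time.Sub over Go's default time format:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Go's default Time.String() layout, as printed in the log record.
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, _ := time.Parse(layout, "2025-11-25 10:37:56 +0000 UTC")
	running, _ := time.Parse(layout, "2025-11-25 10:37:57.740108349 +0000 UTC")
	fmt.Println(running.Sub(created)) // 1.740108349s, the logged podStartSLOduration
}
```
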
Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.415768 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0aeef268-5082-47e1-8bc5-7e66f64509e2-catalog-content\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.416343 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0aeef268-5082-47e1-8bc5-7e66f64509e2-utilities\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.420742 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0aeef268-5082-47e1-8bc5-7e66f64509e2-catalog-content\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.437045 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-nsh8t"] Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.438376 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.440396 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nsh8t"] Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.441278 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.452546 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gv855\" (UniqueName: \"kubernetes.io/projected/0aeef268-5082-47e1-8bc5-7e66f64509e2-kube-api-access-gv855\") pod \"certified-operators-8pk8v\" (UID: \"0aeef268-5082-47e1-8bc5-7e66f64509e2\") " pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.517268 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-utilities\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.517357 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-catalog-content\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.517535 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v4qvc\" (UniqueName: \"kubernetes.io/projected/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-kube-api-access-v4qvc\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 
10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.538996 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.618667 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v4qvc\" (UniqueName: \"kubernetes.io/projected/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-kube-api-access-v4qvc\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.619045 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-utilities\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.619069 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-catalog-content\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.619612 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-catalog-content\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.619739 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-utilities\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.650296 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v4qvc\" (UniqueName: \"kubernetes.io/projected/efcb9706-ad0f-487f-8aae-e3ebd4b88d7d-kube-api-access-v4qvc\") pod \"community-operators-nsh8t\" (UID: \"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d\") " pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.781759 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:37:58 crc kubenswrapper[4702]: I1125 10:37:58.938890 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-8pk8v"] Nov 25 10:37:58 crc kubenswrapper[4702]: W1125 10:37:58.979135 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0aeef268_5082_47e1_8bc5_7e66f64509e2.slice/crio-10005fd0fd8e68780af943369064a27bcb831d93c553a922837bf2888ec124ee WatchSource:0}: Error finding container 10005fd0fd8e68780af943369064a27bcb831d93c553a922837bf2888ec124ee: Status 404 returned error can't find the container with id 10005fd0fd8e68780af943369064a27bcb831d93c553a922837bf2888ec124ee Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.162943 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-nsh8t"] Nov 25 10:37:59 crc kubenswrapper[4702]: W1125 10:37:59.231331 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podefcb9706_ad0f_487f_8aae_e3ebd4b88d7d.slice/crio-d96c9aed3834bbff7132f1bc14a7d970b7aeabc9f36084d24901a6dd1311f94c WatchSource:0}: Error finding container d96c9aed3834bbff7132f1bc14a7d970b7aeabc9f36084d24901a6dd1311f94c: Status 404 returned error can't find the container with id d96c9aed3834bbff7132f1bc14a7d970b7aeabc9f36084d24901a6dd1311f94c Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.409737 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58eeaa3d-0858-43f1-a047-52775d340bc0" path="/var/lib/kubelet/pods/58eeaa3d-0858-43f1-a047-52775d340bc0/volumes" Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.410792 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5df05e89-c694-4234-b4fe-669de4c1dec5" path="/var/lib/kubelet/pods/5df05e89-c694-4234-b4fe-669de4c1dec5/volumes" Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.411638 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f" path="/var/lib/kubelet/pods/cf22dd3c-7d1f-42a7-91b2-a260b8bc1d4f/volumes" Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.413068 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e22e5523-d9e6-4257-bd76-b216c4bee1be" path="/var/lib/kubelet/pods/e22e5523-d9e6-4257-bd76-b216c4bee1be/volumes" Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.692310 4702 generic.go:334] "Generic (PLEG): container finished" podID="0aeef268-5082-47e1-8bc5-7e66f64509e2" containerID="f97fdc6695a7357fc54c5d20aa65dfc39dd16b8064fe6f7e031fcfbbe11a3975" exitCode=0 Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.692407 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8pk8v" event={"ID":"0aeef268-5082-47e1-8bc5-7e66f64509e2","Type":"ContainerDied","Data":"f97fdc6695a7357fc54c5d20aa65dfc39dd16b8064fe6f7e031fcfbbe11a3975"} Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.692446 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8pk8v" event={"ID":"0aeef268-5082-47e1-8bc5-7e66f64509e2","Type":"ContainerStarted","Data":"10005fd0fd8e68780af943369064a27bcb831d93c553a922837bf2888ec124ee"} Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.693807 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/community-operators-nsh8t" event={"ID":"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d","Type":"ContainerDied","Data":"563d160ed093db4d4edfa739e40efb2b5dda6081e7d4712946d570f92e6c07d2"} Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.693827 4702 generic.go:334] "Generic (PLEG): container finished" podID="efcb9706-ad0f-487f-8aae-e3ebd4b88d7d" containerID="563d160ed093db4d4edfa739e40efb2b5dda6081e7d4712946d570f92e6c07d2" exitCode=0 Nov 25 10:37:59 crc kubenswrapper[4702]: I1125 10:37:59.693926 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsh8t" event={"ID":"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d","Type":"ContainerStarted","Data":"d96c9aed3834bbff7132f1bc14a7d970b7aeabc9f36084d24901a6dd1311f94c"} Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.638499 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.641442 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.643344 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.647069 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.750040 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lqj5c\" (UniqueName: \"kubernetes.io/projected/b8b2cad5-dc20-4654-a6e7-4326383a6dda-kube-api-access-lqj5c\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.750112 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b2cad5-dc20-4654-a6e7-4326383a6dda-catalog-content\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.750140 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b2cad5-dc20-4654-a6e7-4326383a6dda-utilities\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.844409 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wktsv"] Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.845526 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.846983 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wktsv"] Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.847414 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.851613 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lqj5c\" (UniqueName: \"kubernetes.io/projected/b8b2cad5-dc20-4654-a6e7-4326383a6dda-kube-api-access-lqj5c\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.851689 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b2cad5-dc20-4654-a6e7-4326383a6dda-catalog-content\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.851716 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b2cad5-dc20-4654-a6e7-4326383a6dda-utilities\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.852150 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8b2cad5-dc20-4654-a6e7-4326383a6dda-catalog-content\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.852251 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8b2cad5-dc20-4654-a6e7-4326383a6dda-utilities\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.870323 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lqj5c\" (UniqueName: \"kubernetes.io/projected/b8b2cad5-dc20-4654-a6e7-4326383a6dda-kube-api-access-lqj5c\") pod \"redhat-marketplace-gfmbn\" (UID: \"b8b2cad5-dc20-4654-a6e7-4326383a6dda\") " pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.953008 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22d7c844-b7a4-47a3-893d-16bc54bdeb76-utilities\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.953075 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22d7c844-b7a4-47a3-893d-16bc54bdeb76-catalog-content\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " 
pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.953133 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xnhrp\" (UniqueName: \"kubernetes.io/projected/22d7c844-b7a4-47a3-893d-16bc54bdeb76-kube-api-access-xnhrp\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:00 crc kubenswrapper[4702]: I1125 10:38:00.969953 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:01 crc kubenswrapper[4702]: I1125 10:38:01.054488 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22d7c844-b7a4-47a3-893d-16bc54bdeb76-catalog-content\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:01 crc kubenswrapper[4702]: I1125 10:38:01.054556 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xnhrp\" (UniqueName: \"kubernetes.io/projected/22d7c844-b7a4-47a3-893d-16bc54bdeb76-kube-api-access-xnhrp\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:01 crc kubenswrapper[4702]: I1125 10:38:01.054614 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22d7c844-b7a4-47a3-893d-16bc54bdeb76-utilities\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:01 crc kubenswrapper[4702]: I1125 10:38:01.054924 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/22d7c844-b7a4-47a3-893d-16bc54bdeb76-catalog-content\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:01 crc kubenswrapper[4702]: I1125 10:38:01.054984 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/22d7c844-b7a4-47a3-893d-16bc54bdeb76-utilities\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:01 crc kubenswrapper[4702]: I1125 10:38:01.071110 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xnhrp\" (UniqueName: \"kubernetes.io/projected/22d7c844-b7a4-47a3-893d-16bc54bdeb76-kube-api-access-xnhrp\") pod \"redhat-operators-wktsv\" (UID: \"22d7c844-b7a4-47a3-893d-16bc54bdeb76\") " pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:01 crc kubenswrapper[4702]: I1125 10:38:01.157409 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.247248 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wktsv"] Nov 25 10:38:02 crc kubenswrapper[4702]: W1125 10:38:02.254053 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod22d7c844_b7a4_47a3_893d_16bc54bdeb76.slice/crio-97788c02a7efb0b29d415ca970edc734c76ba0ae6808a296b48e5a3884930139 WatchSource:0}: Error finding container 97788c02a7efb0b29d415ca970edc734c76ba0ae6808a296b48e5a3884930139: Status 404 returned error can't find the container with id 97788c02a7efb0b29d415ca970edc734c76ba0ae6808a296b48e5a3884930139 Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.317150 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gfmbn"] Nov 25 10:38:02 crc kubenswrapper[4702]: W1125 10:38:02.329086 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb8b2cad5_dc20_4654_a6e7_4326383a6dda.slice/crio-b404d4888b7e45519da5b43f9beacac12622d3746a3773da00c1bc7a8c74c227 WatchSource:0}: Error finding container b404d4888b7e45519da5b43f9beacac12622d3746a3773da00c1bc7a8c74c227: Status 404 returned error can't find the container with id b404d4888b7e45519da5b43f9beacac12622d3746a3773da00c1bc7a8c74c227 Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.709739 4702 generic.go:334] "Generic (PLEG): container finished" podID="22d7c844-b7a4-47a3-893d-16bc54bdeb76" containerID="562f4aa450c168ad2716cfe9659440efed5f7df6ea7718f91db057c7bca64094" exitCode=0 Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.709845 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wktsv" event={"ID":"22d7c844-b7a4-47a3-893d-16bc54bdeb76","Type":"ContainerDied","Data":"562f4aa450c168ad2716cfe9659440efed5f7df6ea7718f91db057c7bca64094"} Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.710149 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wktsv" event={"ID":"22d7c844-b7a4-47a3-893d-16bc54bdeb76","Type":"ContainerStarted","Data":"97788c02a7efb0b29d415ca970edc734c76ba0ae6808a296b48e5a3884930139"} Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.714062 4702 generic.go:334] "Generic (PLEG): container finished" podID="b8b2cad5-dc20-4654-a6e7-4326383a6dda" containerID="21bdd025219fe47f984651fcc3ed8557f5452bc2a9726be5712592a72a337098" exitCode=0 Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.714133 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"b8b2cad5-dc20-4654-a6e7-4326383a6dda","Type":"ContainerDied","Data":"21bdd025219fe47f984651fcc3ed8557f5452bc2a9726be5712592a72a337098"} Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.714159 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"b8b2cad5-dc20-4654-a6e7-4326383a6dda","Type":"ContainerStarted","Data":"b404d4888b7e45519da5b43f9beacac12622d3746a3773da00c1bc7a8c74c227"} Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.721089 4702 generic.go:334] "Generic (PLEG): container finished" podID="0aeef268-5082-47e1-8bc5-7e66f64509e2" containerID="eb7a6dee1f07b321b456337343eba3a618711ebdb07b3c49b0c6d9015609762c" exitCode=0 Nov 25 
10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.721173 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8pk8v" event={"ID":"0aeef268-5082-47e1-8bc5-7e66f64509e2","Type":"ContainerDied","Data":"eb7a6dee1f07b321b456337343eba3a618711ebdb07b3c49b0c6d9015609762c"} Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.727375 4702 generic.go:334] "Generic (PLEG): container finished" podID="efcb9706-ad0f-487f-8aae-e3ebd4b88d7d" containerID="62ff567c6fd8f4446a6c006f44ef2d9c71cf6d89d3a68932f2f5e23df3752ed6" exitCode=0 Nov 25 10:38:02 crc kubenswrapper[4702]: I1125 10:38:02.727413 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsh8t" event={"ID":"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d","Type":"ContainerDied","Data":"62ff567c6fd8f4446a6c006f44ef2d9c71cf6d89d3a68932f2f5e23df3752ed6"} Nov 25 10:38:04 crc kubenswrapper[4702]: I1125 10:38:04.741356 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-nsh8t" event={"ID":"efcb9706-ad0f-487f-8aae-e3ebd4b88d7d","Type":"ContainerStarted","Data":"1b94f9ec29ffe96a3c018bd608e81a1d17bcaac53883029ead1a4f3b424b505f"} Nov 25 10:38:05 crc kubenswrapper[4702]: I1125 10:38:05.748578 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-8pk8v" event={"ID":"0aeef268-5082-47e1-8bc5-7e66f64509e2","Type":"ContainerStarted","Data":"c6471e64494c778d02e2700de8f59325cefff6d274c459fb8dc5a39f7b943c18"} Nov 25 10:38:05 crc kubenswrapper[4702]: I1125 10:38:05.751506 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wktsv" event={"ID":"22d7c844-b7a4-47a3-893d-16bc54bdeb76","Type":"ContainerStarted","Data":"d5f35b45b7d7330a05abd252c8c5a9da6ccf3f335578707ead439f768799d96b"} Nov 25 10:38:05 crc kubenswrapper[4702]: I1125 10:38:05.752946 4702 generic.go:334] "Generic (PLEG): container finished" podID="b8b2cad5-dc20-4654-a6e7-4326383a6dda" containerID="c589d4b0105848f8c3f31e2854aab7a2f455669f7ab59a7b98025c83964cd24c" exitCode=0 Nov 25 10:38:05 crc kubenswrapper[4702]: I1125 10:38:05.753834 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"b8b2cad5-dc20-4654-a6e7-4326383a6dda","Type":"ContainerDied","Data":"c589d4b0105848f8c3f31e2854aab7a2f455669f7ab59a7b98025c83964cd24c"} Nov 25 10:38:05 crc kubenswrapper[4702]: I1125 10:38:05.768303 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-nsh8t" podStartSLOduration=3.893194413 podStartE2EDuration="7.768281309s" podCreationTimestamp="2025-11-25 10:37:58 +0000 UTC" firstStartedPulling="2025-11-25 10:37:59.696034137 +0000 UTC m=+377.062629826" lastFinishedPulling="2025-11-25 10:38:03.571121033 +0000 UTC m=+380.937716722" observedRunningTime="2025-11-25 10:38:04.757343785 +0000 UTC m=+382.123939474" watchObservedRunningTime="2025-11-25 10:38:05.768281309 +0000 UTC m=+383.134876998" Nov 25 10:38:05 crc kubenswrapper[4702]: I1125 10:38:05.769667 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-8pk8v" podStartSLOduration=2.924472596 podStartE2EDuration="7.769653339s" podCreationTimestamp="2025-11-25 10:37:58 +0000 UTC" firstStartedPulling="2025-11-25 10:37:59.694103382 +0000 UTC m=+377.060699071" lastFinishedPulling="2025-11-25 10:38:04.539284135 +0000 UTC m=+381.905879814" 
observedRunningTime="2025-11-25 10:38:05.768154436 +0000 UTC m=+383.134750125" watchObservedRunningTime="2025-11-25 10:38:05.769653339 +0000 UTC m=+383.136249038" Nov 25 10:38:06 crc kubenswrapper[4702]: I1125 10:38:06.758458 4702 generic.go:334] "Generic (PLEG): container finished" podID="22d7c844-b7a4-47a3-893d-16bc54bdeb76" containerID="d5f35b45b7d7330a05abd252c8c5a9da6ccf3f335578707ead439f768799d96b" exitCode=0 Nov 25 10:38:06 crc kubenswrapper[4702]: I1125 10:38:06.758515 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wktsv" event={"ID":"22d7c844-b7a4-47a3-893d-16bc54bdeb76","Type":"ContainerDied","Data":"d5f35b45b7d7330a05abd252c8c5a9da6ccf3f335578707ead439f768799d96b"} Nov 25 10:38:06 crc kubenswrapper[4702]: I1125 10:38:06.763500 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gfmbn" event={"ID":"b8b2cad5-dc20-4654-a6e7-4326383a6dda","Type":"ContainerStarted","Data":"43fc0fe9cf517cc9008ca366cf78af99f901f1a9e686d1635d9b3dface137da2"} Nov 25 10:38:06 crc kubenswrapper[4702]: I1125 10:38:06.795834 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gfmbn" podStartSLOduration=3.063380941 podStartE2EDuration="6.795815541s" podCreationTimestamp="2025-11-25 10:38:00 +0000 UTC" firstStartedPulling="2025-11-25 10:38:02.715025869 +0000 UTC m=+380.081621558" lastFinishedPulling="2025-11-25 10:38:06.447460469 +0000 UTC m=+383.814056158" observedRunningTime="2025-11-25 10:38:06.795461851 +0000 UTC m=+384.162057560" watchObservedRunningTime="2025-11-25 10:38:06.795815541 +0000 UTC m=+384.162411230" Nov 25 10:38:08 crc kubenswrapper[4702]: I1125 10:38:08.539588 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:38:08 crc kubenswrapper[4702]: I1125 10:38:08.540319 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:38:08 crc kubenswrapper[4702]: I1125 10:38:08.583301 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:38:08 crc kubenswrapper[4702]: I1125 10:38:08.781614 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wktsv" event={"ID":"22d7c844-b7a4-47a3-893d-16bc54bdeb76","Type":"ContainerStarted","Data":"1e50e9627bdfb3c271a6e2898a3482a79fd3f1f5527ed4d3e45282e2d91e947f"} Nov 25 10:38:08 crc kubenswrapper[4702]: I1125 10:38:08.782151 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:38:08 crc kubenswrapper[4702]: I1125 10:38:08.782178 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:38:08 crc kubenswrapper[4702]: I1125 10:38:08.801040 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wktsv" podStartSLOduration=3.217935373 podStartE2EDuration="8.801022839s" podCreationTimestamp="2025-11-25 10:38:00 +0000 UTC" firstStartedPulling="2025-11-25 10:38:02.711167338 +0000 UTC m=+380.077763027" lastFinishedPulling="2025-11-25 10:38:08.294254804 +0000 UTC m=+385.660850493" observedRunningTime="2025-11-25 10:38:08.800593587 +0000 UTC m=+386.167189276" watchObservedRunningTime="2025-11-25 
10:38:08.801022839 +0000 UTC m=+386.167618528" Nov 25 10:38:08 crc kubenswrapper[4702]: I1125 10:38:08.822635 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:38:09 crc kubenswrapper[4702]: I1125 10:38:09.827539 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-nsh8t" Nov 25 10:38:10 crc kubenswrapper[4702]: I1125 10:38:10.970144 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:10 crc kubenswrapper[4702]: I1125 10:38:10.970460 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:11 crc kubenswrapper[4702]: I1125 10:38:11.005481 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:11 crc kubenswrapper[4702]: I1125 10:38:11.157830 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:11 crc kubenswrapper[4702]: I1125 10:38:11.158013 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:11 crc kubenswrapper[4702]: I1125 10:38:11.850491 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gfmbn" Nov 25 10:38:12 crc kubenswrapper[4702]: I1125 10:38:12.202400 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wktsv" podUID="22d7c844-b7a4-47a3-893d-16bc54bdeb76" containerName="registry-server" probeResult="failure" output=< Nov 25 10:38:12 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s Nov 25 10:38:12 crc kubenswrapper[4702]: > Nov 25 10:38:18 crc kubenswrapper[4702]: I1125 10:38:18.577279 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-8pk8v" Nov 25 10:38:21 crc kubenswrapper[4702]: I1125 10:38:21.194918 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:21 crc kubenswrapper[4702]: I1125 10:38:21.232306 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wktsv" Nov 25 10:38:22 crc kubenswrapper[4702]: I1125 10:38:22.769972 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" podUID="266ce950-00f4-440d-9196-6a4ab41404ea" containerName="oauth-openshift" containerID="cri-o://85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367" gracePeriod=15 Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.110206 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.140304 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-dmndm"] Nov 25 10:38:23 crc kubenswrapper[4702]: E1125 10:38:23.140560 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="266ce950-00f4-440d-9196-6a4ab41404ea" containerName="oauth-openshift" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.140584 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="266ce950-00f4-440d-9196-6a4ab41404ea" containerName="oauth-openshift" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.140728 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="266ce950-00f4-440d-9196-6a4ab41404ea" containerName="oauth-openshift" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.141215 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.160195 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-dmndm"] Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.232847 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5jj9\" (UniqueName: \"kubernetes.io/projected/266ce950-00f4-440d-9196-6a4ab41404ea-kube-api-access-n5jj9\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.233232 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-audit-policies\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.233257 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-provider-selection\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.233720 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.233794 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-idp-0-file-data\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.233826 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-error\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.233864 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/266ce950-00f4-440d-9196-6a4ab41404ea-audit-dir\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234356 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-ocp-branding-template\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234403 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-serving-cert\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234429 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-login\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234459 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-trusted-ca-bundle\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234480 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-service-ca\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234502 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-session\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 
10:38:23.234524 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-cliconfig\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234560 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-router-certs\") pod \"266ce950-00f4-440d-9196-6a4ab41404ea\" (UID: \"266ce950-00f4-440d-9196-6a4ab41404ea\") " Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234699 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234733 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234754 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nszlg\" (UniqueName: \"kubernetes.io/projected/54d73886-8f08-48f6-bd87-c81df2d1fe17-kube-api-access-nszlg\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234773 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-router-certs\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234790 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-audit-policies\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.234840 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/266ce950-00f4-440d-9196-6a4ab41404ea-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235089 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235134 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235167 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235194 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/54d73886-8f08-48f6-bd87-c81df2d1fe17-audit-dir\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235217 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235241 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235272 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235303 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: 
\"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235325 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235321 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235374 4702 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-audit-policies\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235390 4702 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/266ce950-00f4-440d-9196-6a4ab41404ea-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.235890 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.236218 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.240208 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-user-template-login". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.240763 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/266ce950-00f4-440d-9196-6a4ab41404ea-kube-api-access-n5jj9" (OuterVolumeSpecName: "kube-api-access-n5jj9") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "kube-api-access-n5jj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.240787 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.240956 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.241241 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.241483 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.241692 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.241827 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.245275 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "266ce950-00f4-440d-9196-6a4ab41404ea" (UID: "266ce950-00f4-440d-9196-6a4ab41404ea"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336467 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-audit-policies\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336541 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336565 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-cliconfig\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336586 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336615 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/54d73886-8f08-48f6-bd87-c81df2d1fe17-audit-dir\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336642 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336668 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " 
pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336698 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336720 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336739 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336765 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336786 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-router-certs\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336807 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336825 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nszlg\" (UniqueName: \"kubernetes.io/projected/54d73886-8f08-48f6-bd87-c81df2d1fe17-kube-api-access-nszlg\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336862 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5jj9\" (UniqueName: \"kubernetes.io/projected/266ce950-00f4-440d-9196-6a4ab41404ea-kube-api-access-n5jj9\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336873 4702 
reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336884 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336895 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336942 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336953 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336963 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336971 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336980 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336989 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.336999 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.337008 4702 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/266ce950-00f4-440d-9196-6a4ab41404ea-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.337524 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-cliconfig\") pod 
\"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.337644 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-service-ca\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.338074 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/54d73886-8f08-48f6-bd87-c81df2d1fe17-audit-dir\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.338724 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-audit-policies\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.339164 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.340281 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-error\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.341673 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-session\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.341615 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.341872 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-serving-cert\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " 
pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.342748 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.343553 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.343666 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-user-template-login\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.344991 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/54d73886-8f08-48f6-bd87-c81df2d1fe17-v4-0-config-system-router-certs\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.353420 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nszlg\" (UniqueName: \"kubernetes.io/projected/54d73886-8f08-48f6-bd87-c81df2d1fe17-kube-api-access-nszlg\") pod \"oauth-openshift-58d4f98775-dmndm\" (UID: \"54d73886-8f08-48f6-bd87-c81df2d1fe17\") " pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.455373 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.875281 4702 generic.go:334] "Generic (PLEG): container finished" podID="266ce950-00f4-440d-9196-6a4ab41404ea" containerID="85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367" exitCode=0 Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.875338 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" event={"ID":"266ce950-00f4-440d-9196-6a4ab41404ea","Type":"ContainerDied","Data":"85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367"} Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.875378 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" event={"ID":"266ce950-00f4-440d-9196-6a4ab41404ea","Type":"ContainerDied","Data":"56e050f7cde6ec02649b6ccfd6c224544da6b647abf5750b6adf8e9c0461eba6"} Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.875398 4702 scope.go:117] "RemoveContainer" containerID="85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.875394 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-w2kdw" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.884463 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-58d4f98775-dmndm"] Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.954698 4702 scope.go:117] "RemoveContainer" containerID="85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367" Nov 25 10:38:23 crc kubenswrapper[4702]: E1125 10:38:23.956138 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367\": container with ID starting with 85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367 not found: ID does not exist" containerID="85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.956242 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367"} err="failed to get container status \"85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367\": rpc error: code = NotFound desc = could not find container \"85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367\": container with ID starting with 85b989a3cb92931950977cb9959e38b01e2bb5270e72e31fa858838575124367 not found: ID does not exist" Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.958079 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-w2kdw"] Nov 25 10:38:23 crc kubenswrapper[4702]: I1125 10:38:23.963182 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-w2kdw"] Nov 25 10:38:24 crc kubenswrapper[4702]: I1125 10:38:24.882573 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" event={"ID":"54d73886-8f08-48f6-bd87-c81df2d1fe17","Type":"ContainerStarted","Data":"b6b603d351614aa8d87ea9d62d43835c1389d66a2a82e904b2fe879d7ce4990e"} Nov 25 10:38:24 crc kubenswrapper[4702]: 
I1125 10:38:24.882616 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" event={"ID":"54d73886-8f08-48f6-bd87-c81df2d1fe17","Type":"ContainerStarted","Data":"16d7b6eed10d3602e1a93b15ed3ae03ce67e8cf0952dfec7dffc8d6206cbb742"} Nov 25 10:38:24 crc kubenswrapper[4702]: I1125 10:38:24.883702 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:24 crc kubenswrapper[4702]: I1125 10:38:24.893351 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" Nov 25 10:38:24 crc kubenswrapper[4702]: I1125 10:38:24.907813 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-58d4f98775-dmndm" podStartSLOduration=27.907796107 podStartE2EDuration="27.907796107s" podCreationTimestamp="2025-11-25 10:37:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:38:24.904945855 +0000 UTC m=+402.271541544" watchObservedRunningTime="2025-11-25 10:38:24.907796107 +0000 UTC m=+402.274391796" Nov 25 10:38:25 crc kubenswrapper[4702]: I1125 10:38:25.408648 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="266ce950-00f4-440d-9196-6a4ab41404ea" path="/var/lib/kubelet/pods/266ce950-00f4-440d-9196-6a4ab41404ea/volumes" Nov 25 10:38:43 crc kubenswrapper[4702]: I1125 10:38:43.591104 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:38:43 crc kubenswrapper[4702]: I1125 10:38:43.591829 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.161985 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ldf5z"] Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.164641 4702 util.go:30] "No sandbox for pod can be found. 
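
Editor's note: the pod_startup_latency_tracker record above is plain arithmetic over the printed timestamps: podStartSLOduration is watchObservedRunningTime minus podCreationTimestamp (10:38:24.907796107 - 10:37:57 = 27.907796107s), and it equals podStartE2EDuration here because no image pull happened (firstStartedPulling and lastFinishedPulling are the zero time). A quick check of that subtraction; the layout string is chosen to match how these fields print in this log.

    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        // Layout matching "2025-11-25 10:37:57 +0000 UTC" as printed above.
        const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
        created, err := time.Parse(layout, "2025-11-25 10:37:57 +0000 UTC")
        if err != nil {
            panic(err)
        }
        running, err := time.Parse(layout, "2025-11-25 10:38:24.907796107 +0000 UTC")
        if err != nil {
            panic(err)
        }
        fmt.Println(running.Sub(created)) // 27.907796107s, matching podStartSLOduration
    }
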
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.176954 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ldf5z"] Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.310801 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.310864 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cd6ec2bb-68be-43f0-b347-b7972ad21d94-registry-certificates\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.310886 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cd6ec2bb-68be-43f0-b347-b7972ad21d94-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.310940 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-bound-sa-token\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.310964 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-registry-tls\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.311135 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd6ec2bb-68be-43f0-b347-b7972ad21d94-trusted-ca\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.311160 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c974v\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-kube-api-access-c974v\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.311182 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: 
\"kubernetes.io/secret/cd6ec2bb-68be-43f0-b347-b7972ad21d94-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.334515 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.412275 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c974v\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-kube-api-access-c974v\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.412360 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cd6ec2bb-68be-43f0-b347-b7972ad21d94-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.412414 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cd6ec2bb-68be-43f0-b347-b7972ad21d94-registry-certificates\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.412446 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cd6ec2bb-68be-43f0-b347-b7972ad21d94-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.412501 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-bound-sa-token\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.412537 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-registry-tls\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.412572 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd6ec2bb-68be-43f0-b347-b7972ad21d94-trusted-ca\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.413338 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/cd6ec2bb-68be-43f0-b347-b7972ad21d94-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.414046 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/cd6ec2bb-68be-43f0-b347-b7972ad21d94-trusted-ca\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.414686 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/cd6ec2bb-68be-43f0-b347-b7972ad21d94-registry-certificates\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.419118 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-registry-tls\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.420203 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/cd6ec2bb-68be-43f0-b347-b7972ad21d94-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.433089 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c974v\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-kube-api-access-c974v\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.444081 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/cd6ec2bb-68be-43f0-b347-b7972ad21d94-bound-sa-token\") pod \"image-registry-66df7c8f76-ldf5z\" (UID: \"cd6ec2bb-68be-43f0-b347-b7972ad21d94\") " pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.531959 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:04 crc kubenswrapper[4702]: I1125 10:39:04.734863 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ldf5z"] Nov 25 10:39:05 crc kubenswrapper[4702]: I1125 10:39:05.113688 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" event={"ID":"cd6ec2bb-68be-43f0-b347-b7972ad21d94","Type":"ContainerStarted","Data":"9fc4837d7b13f84e95367986320f1e13ca6e31593715b46440472b9dba702c87"} Nov 25 10:39:05 crc kubenswrapper[4702]: I1125 10:39:05.114054 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:05 crc kubenswrapper[4702]: I1125 10:39:05.114067 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" event={"ID":"cd6ec2bb-68be-43f0-b347-b7972ad21d94","Type":"ContainerStarted","Data":"89462b4501b1097bdfe6b81c09376c8f276accdf9b02f4c49ea5c99968b07005"} Nov 25 10:39:05 crc kubenswrapper[4702]: I1125 10:39:05.134128 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" podStartSLOduration=1.134107566 podStartE2EDuration="1.134107566s" podCreationTimestamp="2025-11-25 10:39:04 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:39:05.131549882 +0000 UTC m=+442.498145571" watchObservedRunningTime="2025-11-25 10:39:05.134107566 +0000 UTC m=+442.500703245" Nov 25 10:39:13 crc kubenswrapper[4702]: I1125 10:39:13.591164 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:39:13 crc kubenswrapper[4702]: I1125 10:39:13.591431 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:39:24 crc kubenswrapper[4702]: I1125 10:39:24.540476 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-ldf5z" Nov 25 10:39:24 crc kubenswrapper[4702]: I1125 10:39:24.603070 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zbgbq"] Nov 25 10:39:43 crc kubenswrapper[4702]: I1125 10:39:43.591260 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:39:43 crc kubenswrapper[4702]: I1125 10:39:43.591849 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 
127.0.0.1:8798: connect: connection refused" Nov 25 10:39:43 crc kubenswrapper[4702]: I1125 10:39:43.591943 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:39:43 crc kubenswrapper[4702]: I1125 10:39:43.592925 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3710cfa79b28ce8750da845c642c667802a6fa3de7d52c73daec57f071ae2a10"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 10:39:43 crc kubenswrapper[4702]: I1125 10:39:43.593005 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://3710cfa79b28ce8750da845c642c667802a6fa3de7d52c73daec57f071ae2a10" gracePeriod=600 Nov 25 10:39:44 crc kubenswrapper[4702]: I1125 10:39:44.325609 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="3710cfa79b28ce8750da845c642c667802a6fa3de7d52c73daec57f071ae2a10" exitCode=0 Nov 25 10:39:44 crc kubenswrapper[4702]: I1125 10:39:44.325692 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"3710cfa79b28ce8750da845c642c667802a6fa3de7d52c73daec57f071ae2a10"} Nov 25 10:39:44 crc kubenswrapper[4702]: I1125 10:39:44.326170 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"ec26713a85198746880277f7b0b371a03ca59293010ad77d43053fb85009ce0f"} Nov 25 10:39:44 crc kubenswrapper[4702]: I1125 10:39:44.326217 4702 scope.go:117] "RemoveContainer" containerID="7cca62511f4e1f3fa89281ddc6c253049a94a7b1830e21d1d5c0a4f6eb16e7b7" Nov 25 10:39:49 crc kubenswrapper[4702]: I1125 10:39:49.652455 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" podUID="74207563-11c3-4723-8375-7a61d6f27733" containerName="registry" containerID="cri-o://6ad705c16ce2020397ada49c92c3f03d19d169c170b7176ed07c722903feec16" gracePeriod=30 Nov 25 10:39:49 crc kubenswrapper[4702]: I1125 10:39:49.899442 4702 patch_prober.go:28] interesting pod/image-registry-697d97f7c8-zbgbq container/registry namespace/openshift-image-registry: Readiness probe status=failure output="Get \"https://10.217.0.28:5000/healthz\": dial tcp 10.217.0.28:5000: connect: connection refused" start-of-body= Nov 25 10:39:49 crc kubenswrapper[4702]: I1125 10:39:49.899884 4702 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" podUID="74207563-11c3-4723-8375-7a61d6f27733" containerName="registry" probeResult="failure" output="Get \"https://10.217.0.28:5000/healthz\": dial tcp 10.217.0.28:5000: connect: connection refused" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.363055 4702 generic.go:334] "Generic (PLEG): container finished" podID="74207563-11c3-4723-8375-7a61d6f27733" containerID="6ad705c16ce2020397ada49c92c3f03d19d169c170b7176ed07c722903feec16" exitCode=0 Nov 25 
10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.363112 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" event={"ID":"74207563-11c3-4723-8375-7a61d6f27733","Type":"ContainerDied","Data":"6ad705c16ce2020397ada49c92c3f03d19d169c170b7176ed07c722903feec16"} Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.598104 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.741574 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74207563-11c3-4723-8375-7a61d6f27733-ca-trust-extracted\") pod \"74207563-11c3-4723-8375-7a61d6f27733\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.741632 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-registry-certificates\") pod \"74207563-11c3-4723-8375-7a61d6f27733\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.741664 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-trusted-ca\") pod \"74207563-11c3-4723-8375-7a61d6f27733\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.741712 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-bound-sa-token\") pod \"74207563-11c3-4723-8375-7a61d6f27733\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.741744 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-registry-tls\") pod \"74207563-11c3-4723-8375-7a61d6f27733\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.741891 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"74207563-11c3-4723-8375-7a61d6f27733\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.741978 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74207563-11c3-4723-8375-7a61d6f27733-installation-pull-secrets\") pod \"74207563-11c3-4723-8375-7a61d6f27733\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.742021 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5lxns\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-kube-api-access-5lxns\") pod \"74207563-11c3-4723-8375-7a61d6f27733\" (UID: \"74207563-11c3-4723-8375-7a61d6f27733\") " Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.742752 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded 
for volume "kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "74207563-11c3-4723-8375-7a61d6f27733" (UID: "74207563-11c3-4723-8375-7a61d6f27733"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.742835 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "74207563-11c3-4723-8375-7a61d6f27733" (UID: "74207563-11c3-4723-8375-7a61d6f27733"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.748536 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "74207563-11c3-4723-8375-7a61d6f27733" (UID: "74207563-11c3-4723-8375-7a61d6f27733"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.754030 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/74207563-11c3-4723-8375-7a61d6f27733-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "74207563-11c3-4723-8375-7a61d6f27733" (UID: "74207563-11c3-4723-8375-7a61d6f27733"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.754269 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-kube-api-access-5lxns" (OuterVolumeSpecName: "kube-api-access-5lxns") pod "74207563-11c3-4723-8375-7a61d6f27733" (UID: "74207563-11c3-4723-8375-7a61d6f27733"). InnerVolumeSpecName "kube-api-access-5lxns". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.755086 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "74207563-11c3-4723-8375-7a61d6f27733" (UID: "74207563-11c3-4723-8375-7a61d6f27733"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.755584 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "74207563-11c3-4723-8375-7a61d6f27733" (UID: "74207563-11c3-4723-8375-7a61d6f27733"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.774175 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/74207563-11c3-4723-8375-7a61d6f27733-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "74207563-11c3-4723-8375-7a61d6f27733" (UID: "74207563-11c3-4723-8375-7a61d6f27733"). InnerVolumeSpecName "ca-trust-extracted". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.843746 4702 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/74207563-11c3-4723-8375-7a61d6f27733-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.843803 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5lxns\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-kube-api-access-5lxns\") on node \"crc\" DevicePath \"\"" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.843815 4702 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/74207563-11c3-4723-8375-7a61d6f27733-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.843825 4702 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-registry-certificates\") on node \"crc\" DevicePath \"\"" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.843833 4702 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/74207563-11c3-4723-8375-7a61d6f27733-trusted-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.843841 4702 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-bound-sa-token\") on node \"crc\" DevicePath \"\"" Nov 25 10:39:50 crc kubenswrapper[4702]: I1125 10:39:50.843849 4702 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/74207563-11c3-4723-8375-7a61d6f27733-registry-tls\") on node \"crc\" DevicePath \"\"" Nov 25 10:39:51 crc kubenswrapper[4702]: I1125 10:39:51.372769 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" event={"ID":"74207563-11c3-4723-8375-7a61d6f27733","Type":"ContainerDied","Data":"01e54833f9ebe610ec0d52ff1c049c39ae5e81d4a63583b237e5433c88464db8"} Nov 25 10:39:51 crc kubenswrapper[4702]: I1125 10:39:51.372810 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-zbgbq" Nov 25 10:39:51 crc kubenswrapper[4702]: I1125 10:39:51.373237 4702 scope.go:117] "RemoveContainer" containerID="6ad705c16ce2020397ada49c92c3f03d19d169c170b7176ed07c722903feec16" Nov 25 10:39:51 crc kubenswrapper[4702]: I1125 10:39:51.399242 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zbgbq"] Nov 25 10:39:51 crc kubenswrapper[4702]: I1125 10:39:51.409694 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-zbgbq"] Nov 25 10:39:53 crc kubenswrapper[4702]: I1125 10:39:53.414527 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="74207563-11c3-4723-8375-7a61d6f27733" path="/var/lib/kubelet/pods/74207563-11c3-4723-8375-7a61d6f27733/volumes" Nov 25 10:40:43 crc kubenswrapper[4702]: I1125 10:40:43.554221 4702 scope.go:117] "RemoveContainer" containerID="b1df9640c1e20fbca1e8196d190a639932226cc0c7fb0a6a722287217ef333c3" Nov 25 10:41:43 crc kubenswrapper[4702]: I1125 10:41:43.591385 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:41:43 crc kubenswrapper[4702]: I1125 10:41:43.591820 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:42:13 crc kubenswrapper[4702]: I1125 10:42:13.590976 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:42:13 crc kubenswrapper[4702]: I1125 10:42:13.591436 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:42:43 crc kubenswrapper[4702]: I1125 10:42:43.591503 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:42:43 crc kubenswrapper[4702]: I1125 10:42:43.592316 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:42:43 crc kubenswrapper[4702]: I1125 10:42:43.592383 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:42:43 crc kubenswrapper[4702]: 
I1125 10:42:43.593158 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"ec26713a85198746880277f7b0b371a03ca59293010ad77d43053fb85009ce0f"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 10:42:43 crc kubenswrapper[4702]: I1125 10:42:43.593265 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://ec26713a85198746880277f7b0b371a03ca59293010ad77d43053fb85009ce0f" gracePeriod=600 Nov 25 10:42:44 crc kubenswrapper[4702]: I1125 10:42:44.386306 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="ec26713a85198746880277f7b0b371a03ca59293010ad77d43053fb85009ce0f" exitCode=0 Nov 25 10:42:44 crc kubenswrapper[4702]: I1125 10:42:44.386397 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"ec26713a85198746880277f7b0b371a03ca59293010ad77d43053fb85009ce0f"} Nov 25 10:42:44 crc kubenswrapper[4702]: I1125 10:42:44.386973 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"7918e86b322b2cb7cfa46c8cc2dfa0c27b9015c392a35a3637cb12006c4d3205"} Nov 25 10:42:44 crc kubenswrapper[4702]: I1125 10:42:44.387005 4702 scope.go:117] "RemoveContainer" containerID="3710cfa79b28ce8750da845c642c667802a6fa3de7d52c73daec57f071ae2a10" Nov 25 10:43:43 crc kubenswrapper[4702]: I1125 10:43:43.628653 4702 scope.go:117] "RemoveContainer" containerID="0952bcbb09a8e5a3cc29f48b979545b94439e0ab9c5a31f3ea6c46a67cfe895f" Nov 25 10:43:43 crc kubenswrapper[4702]: I1125 10:43:43.649784 4702 scope.go:117] "RemoveContainer" containerID="2723f615615f8604f6cc5163ee2998330b109f2534b129206c76499d78d1c436" Nov 25 10:44:16 crc kubenswrapper[4702]: I1125 10:44:16.762079 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n"] Nov 25 10:44:16 crc kubenswrapper[4702]: I1125 10:44:16.762924 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerName="route-controller-manager" containerID="cri-o://d0ab832468c56371451f4a48201768e9ea178e7903a3fe19c3ada00817782f46" gracePeriod=30 Nov 25 10:44:16 crc kubenswrapper[4702]: I1125 10:44:16.765430 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kfhfz"] Nov 25 10:44:16 crc kubenswrapper[4702]: I1125 10:44:16.765654 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" podUID="39e51e54-1814-4e9d-a6e0-42657e63a2c5" containerName="controller-manager" containerID="cri-o://6e6710fb8fd130390654694eee50a1fdfaa138aa109365b9f656e3db821e0ed9" gracePeriod=30 Nov 25 10:44:16 crc kubenswrapper[4702]: I1125 10:44:16.885483 4702 generic.go:334] "Generic (PLEG): container 
finished" podID="39e51e54-1814-4e9d-a6e0-42657e63a2c5" containerID="6e6710fb8fd130390654694eee50a1fdfaa138aa109365b9f656e3db821e0ed9" exitCode=0 Nov 25 10:44:16 crc kubenswrapper[4702]: I1125 10:44:16.885545 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" event={"ID":"39e51e54-1814-4e9d-a6e0-42657e63a2c5","Type":"ContainerDied","Data":"6e6710fb8fd130390654694eee50a1fdfaa138aa109365b9f656e3db821e0ed9"} Nov 25 10:44:16 crc kubenswrapper[4702]: I1125 10:44:16.886715 4702 generic.go:334] "Generic (PLEG): container finished" podID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerID="d0ab832468c56371451f4a48201768e9ea178e7903a3fe19c3ada00817782f46" exitCode=0 Nov 25 10:44:16 crc kubenswrapper[4702]: I1125 10:44:16.886738 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" event={"ID":"9402a523-ed8c-499b-bac9-0a0d6598ef52","Type":"ContainerDied","Data":"d0ab832468c56371451f4a48201768e9ea178e7903a3fe19c3ada00817782f46"} Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.127381 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.135091 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.147610 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39e51e54-1814-4e9d-a6e0-42657e63a2c5-serving-cert\") pod \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.147770 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-config\") pod \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.147802 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-config\") pod \"9402a523-ed8c-499b-bac9-0a0d6598ef52\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.147825 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-proxy-ca-bundles\") pod \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.147895 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6r9w4\" (UniqueName: \"kubernetes.io/projected/39e51e54-1814-4e9d-a6e0-42657e63a2c5-kube-api-access-6r9w4\") pod \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.147950 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-client-ca\") pod 
\"9402a523-ed8c-499b-bac9-0a0d6598ef52\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.148036 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7vxs\" (UniqueName: \"kubernetes.io/projected/9402a523-ed8c-499b-bac9-0a0d6598ef52-kube-api-access-h7vxs\") pod \"9402a523-ed8c-499b-bac9-0a0d6598ef52\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.148078 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9402a523-ed8c-499b-bac9-0a0d6598ef52-serving-cert\") pod \"9402a523-ed8c-499b-bac9-0a0d6598ef52\" (UID: \"9402a523-ed8c-499b-bac9-0a0d6598ef52\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.148112 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-client-ca\") pod \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\" (UID: \"39e51e54-1814-4e9d-a6e0-42657e63a2c5\") " Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.149250 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-client-ca" (OuterVolumeSpecName: "client-ca") pod "39e51e54-1814-4e9d-a6e0-42657e63a2c5" (UID: "39e51e54-1814-4e9d-a6e0-42657e63a2c5"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.149264 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "39e51e54-1814-4e9d-a6e0-42657e63a2c5" (UID: "39e51e54-1814-4e9d-a6e0-42657e63a2c5"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.149451 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-client-ca" (OuterVolumeSpecName: "client-ca") pod "9402a523-ed8c-499b-bac9-0a0d6598ef52" (UID: "9402a523-ed8c-499b-bac9-0a0d6598ef52"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.150213 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-config" (OuterVolumeSpecName: "config") pod "39e51e54-1814-4e9d-a6e0-42657e63a2c5" (UID: "39e51e54-1814-4e9d-a6e0-42657e63a2c5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.152673 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-config" (OuterVolumeSpecName: "config") pod "9402a523-ed8c-499b-bac9-0a0d6598ef52" (UID: "9402a523-ed8c-499b-bac9-0a0d6598ef52"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.154735 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/39e51e54-1814-4e9d-a6e0-42657e63a2c5-kube-api-access-6r9w4" (OuterVolumeSpecName: "kube-api-access-6r9w4") pod "39e51e54-1814-4e9d-a6e0-42657e63a2c5" (UID: "39e51e54-1814-4e9d-a6e0-42657e63a2c5"). InnerVolumeSpecName "kube-api-access-6r9w4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.154812 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9402a523-ed8c-499b-bac9-0a0d6598ef52-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9402a523-ed8c-499b-bac9-0a0d6598ef52" (UID: "9402a523-ed8c-499b-bac9-0a0d6598ef52"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.154895 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9402a523-ed8c-499b-bac9-0a0d6598ef52-kube-api-access-h7vxs" (OuterVolumeSpecName: "kube-api-access-h7vxs") pod "9402a523-ed8c-499b-bac9-0a0d6598ef52" (UID: "9402a523-ed8c-499b-bac9-0a0d6598ef52"). InnerVolumeSpecName "kube-api-access-h7vxs". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.155182 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/39e51e54-1814-4e9d-a6e0-42657e63a2c5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "39e51e54-1814-4e9d-a6e0-42657e63a2c5" (UID: "39e51e54-1814-4e9d-a6e0-42657e63a2c5"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249550 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6r9w4\" (UniqueName: \"kubernetes.io/projected/39e51e54-1814-4e9d-a6e0-42657e63a2c5-kube-api-access-6r9w4\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249600 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7vxs\" (UniqueName: \"kubernetes.io/projected/9402a523-ed8c-499b-bac9-0a0d6598ef52-kube-api-access-h7vxs\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249615 4702 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249628 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9402a523-ed8c-499b-bac9-0a0d6598ef52-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249639 4702 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-client-ca\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249649 4702 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/39e51e54-1814-4e9d-a6e0-42657e63a2c5-serving-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249661 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249676 4702 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9402a523-ed8c-499b-bac9-0a0d6598ef52-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.249686 4702 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/39e51e54-1814-4e9d-a6e0-42657e63a2c5-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.893613 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.893561 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-kfhfz" event={"ID":"39e51e54-1814-4e9d-a6e0-42657e63a2c5","Type":"ContainerDied","Data":"9f8ea866307115a6e976a20f4f451ebf864623bbd502164edfe4f220bd54a1ab"} Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.894264 4702 scope.go:117] "RemoveContainer" containerID="6e6710fb8fd130390654694eee50a1fdfaa138aa109365b9f656e3db821e0ed9" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.895988 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" event={"ID":"9402a523-ed8c-499b-bac9-0a0d6598ef52","Type":"ContainerDied","Data":"67bc7acef838e847b451eb7a035523d0cac6e639a66286dcbcc54f0aecbd2427"} Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.896155 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.911554 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n"] Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.916961 4702 scope.go:117] "RemoveContainer" containerID="d0ab832468c56371451f4a48201768e9ea178e7903a3fe19c3ada00817782f46" Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.925831 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-n4r8n"] Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.930278 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kfhfz"] Nov 25 10:44:17 crc kubenswrapper[4702]: I1125 10:44:17.935305 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-kfhfz"] Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.516739 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7"] Nov 25 10:44:18 crc kubenswrapper[4702]: E1125 10:44:18.517004 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerName="route-controller-manager" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.517024 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerName="route-controller-manager" Nov 25 10:44:18 crc kubenswrapper[4702]: E1125 10:44:18.517041 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="74207563-11c3-4723-8375-7a61d6f27733" containerName="registry" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.517050 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="74207563-11c3-4723-8375-7a61d6f27733" containerName="registry" Nov 25 10:44:18 crc kubenswrapper[4702]: E1125 10:44:18.517060 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="39e51e54-1814-4e9d-a6e0-42657e63a2c5" containerName="controller-manager" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.517068 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="39e51e54-1814-4e9d-a6e0-42657e63a2c5" containerName="controller-manager" Nov 
25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.517175 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" containerName="route-controller-manager" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.517191 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="39e51e54-1814-4e9d-a6e0-42657e63a2c5" containerName="controller-manager" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.517198 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="74207563-11c3-4723-8375-7a61d6f27733" containerName="registry" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.517637 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.522867 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.523226 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.524489 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.524588 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.524734 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.524533 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.534769 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7"] Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.564407 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cc574556-c479-4d0b-8533-315768e313e4-client-ca\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.564444 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc574556-c479-4d0b-8533-315768e313e4-serving-cert\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.564471 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-npbxz\" (UniqueName: \"kubernetes.io/projected/cc574556-c479-4d0b-8533-315768e313e4-kube-api-access-npbxz\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 
25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.564503 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc574556-c479-4d0b-8533-315768e313e4-config\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.665628 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cc574556-c479-4d0b-8533-315768e313e4-client-ca\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.665680 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc574556-c479-4d0b-8533-315768e313e4-serving-cert\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.665720 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-npbxz\" (UniqueName: \"kubernetes.io/projected/cc574556-c479-4d0b-8533-315768e313e4-kube-api-access-npbxz\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.665763 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc574556-c479-4d0b-8533-315768e313e4-config\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.666722 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cc574556-c479-4d0b-8533-315768e313e4-client-ca\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.666999 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cc574556-c479-4d0b-8533-315768e313e4-config\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.671115 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cc574556-c479-4d0b-8533-315768e313e4-serving-cert\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.684029 4702 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-npbxz\" (UniqueName: \"kubernetes.io/projected/cc574556-c479-4d0b-8533-315768e313e4-kube-api-access-npbxz\") pod \"route-controller-manager-554d5854cf-rlnc7\" (UID: \"cc574556-c479-4d0b-8533-315768e313e4\") " pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.689987 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-74fbd4d896-f6scd"] Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.690850 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.694221 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.694396 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.694517 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.694935 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.695111 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.699412 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.700403 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.703020 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74fbd4d896-f6scd"] Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.766576 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/560288bd-088b-49f1-a89b-7b35eba4ae35-serving-cert\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.766642 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-config\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.766672 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-proxy-ca-bundles\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 
crc kubenswrapper[4702]: I1125 10:44:18.766853 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cphs9\" (UniqueName: \"kubernetes.io/projected/560288bd-088b-49f1-a89b-7b35eba4ae35-kube-api-access-cphs9\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.766980 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-client-ca\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.834558 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.867948 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cphs9\" (UniqueName: \"kubernetes.io/projected/560288bd-088b-49f1-a89b-7b35eba4ae35-kube-api-access-cphs9\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.868013 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-client-ca\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.868076 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/560288bd-088b-49f1-a89b-7b35eba4ae35-serving-cert\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.868107 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-config\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.868142 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-proxy-ca-bundles\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.869245 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-client-ca\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " 
pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.869397 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-proxy-ca-bundles\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.869943 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/560288bd-088b-49f1-a89b-7b35eba4ae35-config\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.874842 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/560288bd-088b-49f1-a89b-7b35eba4ae35-serving-cert\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:18 crc kubenswrapper[4702]: I1125 10:44:18.887200 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cphs9\" (UniqueName: \"kubernetes.io/projected/560288bd-088b-49f1-a89b-7b35eba4ae35-kube-api-access-cphs9\") pod \"controller-manager-74fbd4d896-f6scd\" (UID: \"560288bd-088b-49f1-a89b-7b35eba4ae35\") " pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.025796 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.039762 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7"] Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.248820 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-74fbd4d896-f6scd"] Nov 25 10:44:19 crc kubenswrapper[4702]: W1125 10:44:19.256504 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod560288bd_088b_49f1_a89b_7b35eba4ae35.slice/crio-0f13490f92f0732c88b5fdb2c570586851bf4fc5da583bb7d9d440496315671d WatchSource:0}: Error finding container 0f13490f92f0732c88b5fdb2c570586851bf4fc5da583bb7d9d440496315671d: Status 404 returned error can't find the container with id 0f13490f92f0732c88b5fdb2c570586851bf4fc5da583bb7d9d440496315671d Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.411252 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="39e51e54-1814-4e9d-a6e0-42657e63a2c5" path="/var/lib/kubelet/pods/39e51e54-1814-4e9d-a6e0-42657e63a2c5/volumes" Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.411952 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9402a523-ed8c-499b-bac9-0a0d6598ef52" path="/var/lib/kubelet/pods/9402a523-ed8c-499b-bac9-0a0d6598ef52/volumes" Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.921238 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" event={"ID":"560288bd-088b-49f1-a89b-7b35eba4ae35","Type":"ContainerStarted","Data":"acd87dfa4b7bc0a4e482c12477682e71ef9a4b95249e8d03e799aea627c32612"} Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.921612 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.921628 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" event={"ID":"560288bd-088b-49f1-a89b-7b35eba4ae35","Type":"ContainerStarted","Data":"0f13490f92f0732c88b5fdb2c570586851bf4fc5da583bb7d9d440496315671d"} Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.924582 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" event={"ID":"cc574556-c479-4d0b-8533-315768e313e4","Type":"ContainerStarted","Data":"3348241e4599b771a7660b796fbb4c786a21c345f3091f8f3342cd380d5672e4"} Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.924607 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" event={"ID":"cc574556-c479-4d0b-8533-315768e313e4","Type":"ContainerStarted","Data":"a4920fb0388ad1270705be35992d53384cc73de8d2b15990d7a07b4d8bb7e695"} Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.924745 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.926352 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" Nov 25 
10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.929067 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.948796 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-74fbd4d896-f6scd" podStartSLOduration=3.94876197 podStartE2EDuration="3.94876197s" podCreationTimestamp="2025-11-25 10:44:16 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:44:19.948352638 +0000 UTC m=+757.314948327" watchObservedRunningTime="2025-11-25 10:44:19.94876197 +0000 UTC m=+757.315357659" Nov 25 10:44:19 crc kubenswrapper[4702]: I1125 10:44:19.973565 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-554d5854cf-rlnc7" podStartSLOduration=1.9735434029999999 podStartE2EDuration="1.973543403s" podCreationTimestamp="2025-11-25 10:44:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:44:19.969600109 +0000 UTC m=+757.336195808" watchObservedRunningTime="2025-11-25 10:44:19.973543403 +0000 UTC m=+757.340139092" Nov 25 10:44:25 crc kubenswrapper[4702]: I1125 10:44:25.163514 4702 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.587941 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-h8hn4"] Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.590779 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovn-controller" containerID="cri-o://ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f" gracePeriod=30 Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.590947 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kube-rbac-proxy-node" containerID="cri-o://73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9" gracePeriod=30 Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.591020 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovn-acl-logging" containerID="cri-o://6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6" gracePeriod=30 Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.591005 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="northd" containerID="cri-o://c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f" gracePeriod=30 Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.591107 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="sbdb" 
containerID="cri-o://8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" gracePeriod=30 Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.590940 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383" gracePeriod=30 Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.590850 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="nbdb" containerID="cri-o://0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" gracePeriod=30 Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.668272 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" containerID="cri-o://0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2" gracePeriod=30 Nov 25 10:44:34 crc kubenswrapper[4702]: E1125 10:44:34.735417 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Nov 25 10:44:34 crc kubenswrapper[4702]: E1125 10:44:34.735524 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Nov 25 10:44:34 crc kubenswrapper[4702]: E1125 10:44:34.738021 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Nov 25 10:44:34 crc kubenswrapper[4702]: E1125 10:44:34.738113 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Nov 25 10:44:34 crc kubenswrapper[4702]: E1125 10:44:34.740740 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" cmd=["/bin/bash","-c","set -xeo pipefail\n. 
/ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"sb\"\n"] Nov 25 10:44:34 crc kubenswrapper[4702]: E1125 10:44:34.740765 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" cmd=["/bin/bash","-c","set -xeo pipefail\n. /ovnkube-lib/ovnkube-lib.sh || exit 1\novndb-readiness-probe \"nb\"\n"] Nov 25 10:44:34 crc kubenswrapper[4702]: E1125 10:44:34.740777 4702 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="sbdb" Nov 25 10:44:34 crc kubenswrapper[4702]: E1125 10:44:34.740804 4702 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="nbdb" Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.967552 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/3.log" Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.971774 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovn-acl-logging/0.log" Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.972404 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovn-controller/0.log" Nov 25 10:44:34 crc kubenswrapper[4702]: I1125 10:44:34.972840 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.028681 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/2.log" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.029143 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/1.log" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.029180 4702 generic.go:334] "Generic (PLEG): container finished" podID="fc7bcda9-5809-4852-8dd7-414ead106d61" containerID="556a391af94990867eec33ada4cd8f20bcf026bb13b614eccc44d39b2b055d1f" exitCode=2 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.029242 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dxlxj" event={"ID":"fc7bcda9-5809-4852-8dd7-414ead106d61","Type":"ContainerDied","Data":"556a391af94990867eec33ada4cd8f20bcf026bb13b614eccc44d39b2b055d1f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.029280 4702 scope.go:117] "RemoveContainer" containerID="9046a2141142039dd6d9e5eb9ba19541ada30fcb5cf84e31e4f87a7f7fca9ddf" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.029795 4702 scope.go:117] "RemoveContainer" containerID="556a391af94990867eec33ada4cd8f20bcf026bb13b614eccc44d39b2b055d1f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.032035 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovnkube-controller/3.log" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.055308 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovn-acl-logging/0.log" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057492 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-664t9"] Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057716 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057735 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057745 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="nbdb" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057753 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="nbdb" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057765 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="northd" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057773 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="northd" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057785 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057794 4702 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057803 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kubecfg-setup" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057810 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kubecfg-setup" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057819 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kube-rbac-proxy-node" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057826 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kube-rbac-proxy-node" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057834 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kube-rbac-proxy-ovn-metrics" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057842 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kube-rbac-proxy-ovn-metrics" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057851 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057859 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057870 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovn-acl-logging" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057877 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovn-acl-logging" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057888 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="sbdb" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057896 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="sbdb" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057921 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057930 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.057946 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovn-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.057954 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovn-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058056 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovn-acl-logging" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058069 4702 
memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="nbdb" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058080 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058088 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovn-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058098 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058106 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058114 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kube-rbac-proxy-node" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058122 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058133 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="northd" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058142 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="sbdb" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058151 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="kube-rbac-proxy-ovn-metrics" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.058247 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058255 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.058353 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerName="ovnkube-controller" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.063730 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.069328 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-h8hn4_a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/ovn-controller/0.log" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.072998 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-kubelet\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073074 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/91f129ea-9609-4084-b37d-fdf68b3a8071-ovn-node-metrics-cert\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073116 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-run-netns\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073446 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-ovn\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073616 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-run-ovn-kubernetes\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073651 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-env-overrides\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073676 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-systemd-units\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073722 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 
crc kubenswrapper[4702]: I1125 10:44:35.073768 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-etc-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073795 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-log-socket\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073847 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-cni-bin\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073871 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-var-lib-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.073897 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.074381 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-cni-netd\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.074537 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-node-log\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.074572 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-systemd\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.074622 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-ovnkube-config\") pod \"ovnkube-node-664t9\" (UID: 
\"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.074674 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-slash\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.076808 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2" exitCode=0 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077119 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" exitCode=0 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077139 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" exitCode=0 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077149 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f" exitCode=0 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077156 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383" exitCode=0 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077165 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9" exitCode=0 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077172 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6" exitCode=143 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077181 4702 generic.go:334] "Generic (PLEG): container finished" podID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" containerID="ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f" exitCode=143 Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077203 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077236 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077250 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077262 
4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077277 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077288 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077299 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077312 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077318 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077324 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077330 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077336 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077342 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077348 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077357 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077363 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077371 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" 
event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077296 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.077379 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078578 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078598 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078609 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078616 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078623 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078631 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078637 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078644 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078651 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078678 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078703 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078710 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078717 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078725 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078732 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078738 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078744 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078750 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078757 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078895 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078932 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-h8hn4" event={"ID":"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6","Type":"ContainerDied","Data":"b0d0d80327a02d53b7b6262ecc0ee5b33b184904471ffe565634dba2bdecd0d4"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078955 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078962 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078968 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078973 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078978 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" 
containerID={"Type":"cri-o","ID":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078983 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078992 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.078998 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.079003 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.079008 4702 pod_container_deletor.go:114] "Failed to issue the request to remove container" containerID={"Type":"cri-o","ID":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.094029 4702 scope.go:117] "RemoveContainer" containerID="0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.120070 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.139307 4702 scope.go:117] "RemoveContainer" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.149941 4702 scope.go:117] "RemoveContainer" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.160589 4702 scope.go:117] "RemoveContainer" containerID="c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.173622 4702 scope.go:117] "RemoveContainer" containerID="fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175280 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-script-lib\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175306 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-config\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175326 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-bin\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175351 4702 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-etc-openvswitch\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175371 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovn-node-metrics-cert\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175400 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-kubelet\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175419 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-systemd-units\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175435 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8mlzf\" (UniqueName: \"kubernetes.io/projected/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-kube-api-access-8mlzf\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175472 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-openvswitch\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175495 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-netns\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175521 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-slash\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175525 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "systemd-units". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175546 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-env-overrides\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175557 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175600 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "host-kubelet". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175624 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175695 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.175709 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-slash" (OuterVolumeSpecName: "host-slash") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "host-slash". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176004 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176108 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "env-overrides". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176197 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176502 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176556 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-log-socket\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176585 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-ovn-kubernetes\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176605 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-netd\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176621 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-node-log\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176656 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-var-lib-cni-networks-ovn-kubernetes\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176690 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-ovn\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176712 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-systemd\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176736 4702 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-var-lib-openvswitch\") pod \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\" (UID: \"a50f8b41-e2d8-4d32-9306-bdb2a753a4b6\") " Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176807 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-log-socket" (OuterVolumeSpecName: "log-socket") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176853 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-node-log" (OuterVolumeSpecName: "node-log") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176884 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176887 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-cni-netd\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176940 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-cni-netd\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176981 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "host-cni-netd". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.176986 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-node-log\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177023 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-ovnkube-config\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177049 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-systemd\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177077 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-slash\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177138 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-kubelet\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177003 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177273 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-ovnkube-script-lib\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177047 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177176 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "var-lib-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177273 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-systemd\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177307 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-slash\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177350 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-node-log\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177366 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-kubelet\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177492 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/91f129ea-9609-4084-b37d-fdf68b3a8071-ovn-node-metrics-cert\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177539 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-run-netns\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177567 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-ovn\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177587 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-run-ovn-kubernetes\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 
crc kubenswrapper[4702]: I1125 10:44:35.177612 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-env-overrides\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177633 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-systemd-units\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177641 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-run-netns\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177657 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tn444\" (UniqueName: \"kubernetes.io/projected/91f129ea-9609-4084-b37d-fdf68b3a8071-kube-api-access-tn444\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177693 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177727 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-etc-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177752 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-log-socket\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177785 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-cni-bin\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177810 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-var-lib-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177834 4702 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177887 4702 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-config\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177917 4702 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177930 4702 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-bin\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177942 4702 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-kubelet\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177952 4702 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-systemd-units\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177966 4702 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177977 4702 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-netns\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177988 4702 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-slash\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178000 4702 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-env-overrides\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178010 4702 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-log-socket\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178021 4702 reconciler_common.go:293] "Volume detached for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178034 4702 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-cni-netd\") on node 
\"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178044 4702 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-node-log\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178056 4702 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178068 4702 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-ovn\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178082 4702 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178096 4702 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178127 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178161 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-ovn\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178188 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-run-ovn-kubernetes\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178354 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-run-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.177603 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-ovnkube-config\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178391 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: 
\"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-systemd-units\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178418 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-log-socket\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178437 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-etc-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178459 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-host-cni-bin\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178479 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/91f129ea-9609-4084-b37d-fdf68b3a8071-var-lib-openvswitch\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.178578 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-env-overrides\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.181597 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-kube-api-access-8mlzf" (OuterVolumeSpecName: "kube-api-access-8mlzf") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "kube-api-access-8mlzf". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.182400 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.182428 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/91f129ea-9609-4084-b37d-fdf68b3a8071-ovn-node-metrics-cert\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.185805 4702 scope.go:117] "RemoveContainer" containerID="73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.190108 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" (UID: "a50f8b41-e2d8-4d32-9306-bdb2a753a4b6"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.198986 4702 scope.go:117] "RemoveContainer" containerID="6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.209938 4702 scope.go:117] "RemoveContainer" containerID="ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.225347 4702 scope.go:117] "RemoveContainer" containerID="cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.241095 4702 scope.go:117] "RemoveContainer" containerID="0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.241606 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": container with ID starting with 0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2 not found: ID does not exist" containerID="0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.241646 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} err="failed to get container status \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": rpc error: code = NotFound desc = could not find container \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": container with ID starting with 0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.241674 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.242080 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": container with ID starting with cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c not found: ID does not exist" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.242112 4702 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} err="failed to get container status \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": rpc error: code = NotFound desc = could not find container \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": container with ID starting with cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.242136 4702 scope.go:117] "RemoveContainer" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.242470 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": container with ID starting with 8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf not found: ID does not exist" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.242498 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} err="failed to get container status \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": rpc error: code = NotFound desc = could not find container \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": container with ID starting with 8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.242516 4702 scope.go:117] "RemoveContainer" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.243203 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": container with ID starting with 0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78 not found: ID does not exist" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.243250 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} err="failed to get container status \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": rpc error: code = NotFound desc = could not find container \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": container with ID starting with 0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.243279 4702 scope.go:117] "RemoveContainer" containerID="c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.243579 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": container with ID starting with c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f not found: ID does 
not exist" containerID="c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.243607 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} err="failed to get container status \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": rpc error: code = NotFound desc = could not find container \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": container with ID starting with c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.243626 4702 scope.go:117] "RemoveContainer" containerID="fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.243917 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": container with ID starting with fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383 not found: ID does not exist" containerID="fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.243943 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} err="failed to get container status \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": rpc error: code = NotFound desc = could not find container \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": container with ID starting with fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.243960 4702 scope.go:117] "RemoveContainer" containerID="73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.244181 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": container with ID starting with 73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9 not found: ID does not exist" containerID="73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.244207 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} err="failed to get container status \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": rpc error: code = NotFound desc = could not find container \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": container with ID starting with 73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.244224 4702 scope.go:117] "RemoveContainer" containerID="6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.244477 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": container with ID starting with 6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6 not found: ID does not exist" containerID="6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.244529 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} err="failed to get container status \"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": rpc error: code = NotFound desc = could not find container \"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": container with ID starting with 6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.244544 4702 scope.go:117] "RemoveContainer" containerID="ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.244871 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": container with ID starting with ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f not found: ID does not exist" containerID="ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.244891 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} err="failed to get container status \"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": rpc error: code = NotFound desc = could not find container \"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": container with ID starting with ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.244922 4702 scope.go:117] "RemoveContainer" containerID="cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65" Nov 25 10:44:35 crc kubenswrapper[4702]: E1125 10:44:35.245182 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": container with ID starting with cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65 not found: ID does not exist" containerID="cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.245214 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} err="failed to get container status \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": rpc error: code = NotFound desc = could not find container \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": container with ID starting with cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.245233 4702 scope.go:117] "RemoveContainer" containerID="0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2" Nov 25 10:44:35 crc 
kubenswrapper[4702]: I1125 10:44:35.245596 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} err="failed to get container status \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": rpc error: code = NotFound desc = could not find container \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": container with ID starting with 0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.245645 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.246013 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} err="failed to get container status \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": rpc error: code = NotFound desc = could not find container \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": container with ID starting with cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.246041 4702 scope.go:117] "RemoveContainer" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.246314 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} err="failed to get container status \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": rpc error: code = NotFound desc = could not find container \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": container with ID starting with 8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.246338 4702 scope.go:117] "RemoveContainer" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.246674 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} err="failed to get container status \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": rpc error: code = NotFound desc = could not find container \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": container with ID starting with 0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.246700 4702 scope.go:117] "RemoveContainer" containerID="c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.246987 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} err="failed to get container status \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": rpc error: code = NotFound desc = could not find container \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": container with ID 
starting with c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.247022 4702 scope.go:117] "RemoveContainer" containerID="fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.247226 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} err="failed to get container status \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": rpc error: code = NotFound desc = could not find container \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": container with ID starting with fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.247248 4702 scope.go:117] "RemoveContainer" containerID="73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.247486 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} err="failed to get container status \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": rpc error: code = NotFound desc = could not find container \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": container with ID starting with 73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.247506 4702 scope.go:117] "RemoveContainer" containerID="6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.247759 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} err="failed to get container status \"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": rpc error: code = NotFound desc = could not find container \"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": container with ID starting with 6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.247812 4702 scope.go:117] "RemoveContainer" containerID="ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.248236 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} err="failed to get container status \"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": rpc error: code = NotFound desc = could not find container \"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": container with ID starting with ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.248274 4702 scope.go:117] "RemoveContainer" containerID="cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.248629 4702 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} err="failed to get container status \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": rpc error: code = NotFound desc = could not find container \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": container with ID starting with cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.248668 4702 scope.go:117] "RemoveContainer" containerID="0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.248889 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} err="failed to get container status \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": rpc error: code = NotFound desc = could not find container \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": container with ID starting with 0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.248926 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.249385 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} err="failed to get container status \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": rpc error: code = NotFound desc = could not find container \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": container with ID starting with cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.249421 4702 scope.go:117] "RemoveContainer" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.249685 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} err="failed to get container status \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": rpc error: code = NotFound desc = could not find container \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": container with ID starting with 8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.249707 4702 scope.go:117] "RemoveContainer" containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.249930 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} err="failed to get container status \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": rpc error: code = NotFound desc = could not find container \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": container with ID starting with 0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78 not found: ID does not exist" Nov 
25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.249954 4702 scope.go:117] "RemoveContainer" containerID="c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.250254 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} err="failed to get container status \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": rpc error: code = NotFound desc = could not find container \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": container with ID starting with c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.250277 4702 scope.go:117] "RemoveContainer" containerID="fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.250557 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} err="failed to get container status \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": rpc error: code = NotFound desc = could not find container \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": container with ID starting with fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.250608 4702 scope.go:117] "RemoveContainer" containerID="73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.250855 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} err="failed to get container status \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": rpc error: code = NotFound desc = could not find container \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": container with ID starting with 73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.250874 4702 scope.go:117] "RemoveContainer" containerID="6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.251178 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} err="failed to get container status \"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": rpc error: code = NotFound desc = could not find container \"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": container with ID starting with 6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.251191 4702 scope.go:117] "RemoveContainer" containerID="ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.251419 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} err="failed to get container status 
\"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": rpc error: code = NotFound desc = could not find container \"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": container with ID starting with ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.251445 4702 scope.go:117] "RemoveContainer" containerID="cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.251686 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} err="failed to get container status \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": rpc error: code = NotFound desc = could not find container \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": container with ID starting with cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.251708 4702 scope.go:117] "RemoveContainer" containerID="0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.252002 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2"} err="failed to get container status \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": rpc error: code = NotFound desc = could not find container \"0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2\": container with ID starting with 0235d87558e578be81606410992869492269a54d0efbe1dd82067e78cca37ee2 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.252040 4702 scope.go:117] "RemoveContainer" containerID="cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.252262 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c"} err="failed to get container status \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": rpc error: code = NotFound desc = could not find container \"cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c\": container with ID starting with cb4c7519258f9f6306dd1500e9e471e2711bd38d91bd8e2f567dbe389736118c not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.252289 4702 scope.go:117] "RemoveContainer" containerID="8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.252549 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf"} err="failed to get container status \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": rpc error: code = NotFound desc = could not find container \"8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf\": container with ID starting with 8aceaddc52f271339cb9344858e556fc0f638842ae464e11a2d286d4f36215cf not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.252572 4702 scope.go:117] "RemoveContainer" 
containerID="0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.252886 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78"} err="failed to get container status \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": rpc error: code = NotFound desc = could not find container \"0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78\": container with ID starting with 0a1c2f1227c236d5f1fb14b71acef5a5bec687aac597eac6acbe8cb9782a2d78 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.252954 4702 scope.go:117] "RemoveContainer" containerID="c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.253177 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f"} err="failed to get container status \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": rpc error: code = NotFound desc = could not find container \"c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f\": container with ID starting with c5b59d998dd368f196bca14b36ca2b92d7ad696e90603a857c89f8519e318a3f not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.253214 4702 scope.go:117] "RemoveContainer" containerID="fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.253455 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383"} err="failed to get container status \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": rpc error: code = NotFound desc = could not find container \"fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383\": container with ID starting with fad361626480ce6539d23752a14273ba4c79a9a5136fd990cb9091e89feee383 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.253474 4702 scope.go:117] "RemoveContainer" containerID="73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.253819 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9"} err="failed to get container status \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": rpc error: code = NotFound desc = could not find container \"73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9\": container with ID starting with 73f55f3308ca1d818c8bd2fc37c9e18721c97a44bb3ddc8785d40d7422136aa9 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.253854 4702 scope.go:117] "RemoveContainer" containerID="6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.254204 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6"} err="failed to get container status \"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": rpc error: code = NotFound desc = could not find 
container \"6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6\": container with ID starting with 6a1eb17d712b93d54f2773ac4a3ed11dbb89c3f75786c7693d01552ca4e187c6 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.254223 4702 scope.go:117] "RemoveContainer" containerID="ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.254819 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f"} err="failed to get container status \"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": rpc error: code = NotFound desc = could not find container \"ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f\": container with ID starting with ab9177c3f2dd2115e068797d80f10cc1ee756ef9642f67429286c3fef08a075f not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.254841 4702 scope.go:117] "RemoveContainer" containerID="cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.255165 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65"} err="failed to get container status \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": rpc error: code = NotFound desc = could not find container \"cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65\": container with ID starting with cb3a2d5d8b090a7ce5732065b2a5f5c5ed670552136afae9d0987de466b5bc65 not found: ID does not exist" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.279203 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tn444\" (UniqueName: \"kubernetes.io/projected/91f129ea-9609-4084-b37d-fdf68b3a8071-kube-api-access-tn444\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.279339 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-ovnkube-script-lib\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.279440 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8mlzf\" (UniqueName: \"kubernetes.io/projected/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-kube-api-access-8mlzf\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.279458 4702 reconciler_common.go:293] "Volume detached for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-run-systemd\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.279471 4702 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.280019 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: 
\"kubernetes.io/configmap/91f129ea-9609-4084-b37d-fdf68b3a8071-ovnkube-script-lib\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.305736 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tn444\" (UniqueName: \"kubernetes.io/projected/91f129ea-9609-4084-b37d-fdf68b3a8071-kube-api-access-tn444\") pod \"ovnkube-node-664t9\" (UID: \"91f129ea-9609-4084-b37d-fdf68b3a8071\") " pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.413008 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-h8hn4"] Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.413044 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-h8hn4"] Nov 25 10:44:35 crc kubenswrapper[4702]: I1125 10:44:35.414600 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:35 crc kubenswrapper[4702]: W1125 10:44:35.438469 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91f129ea_9609_4084_b37d_fdf68b3a8071.slice/crio-e8b65fb50ecbff0866f37b0b5dbcae0caa9ac412902009fb6124c69036a76f2a WatchSource:0}: Error finding container e8b65fb50ecbff0866f37b0b5dbcae0caa9ac412902009fb6124c69036a76f2a: Status 404 returned error can't find the container with id e8b65fb50ecbff0866f37b0b5dbcae0caa9ac412902009fb6124c69036a76f2a Nov 25 10:44:36 crc kubenswrapper[4702]: I1125 10:44:36.083082 4702 generic.go:334] "Generic (PLEG): container finished" podID="91f129ea-9609-4084-b37d-fdf68b3a8071" containerID="e15b5b835b89f72d7a03e8d5e026210e1ac4df201ba10cd7e316e31e4527d81f" exitCode=0 Nov 25 10:44:36 crc kubenswrapper[4702]: I1125 10:44:36.083181 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerDied","Data":"e15b5b835b89f72d7a03e8d5e026210e1ac4df201ba10cd7e316e31e4527d81f"} Nov 25 10:44:36 crc kubenswrapper[4702]: I1125 10:44:36.083253 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"e8b65fb50ecbff0866f37b0b5dbcae0caa9ac412902009fb6124c69036a76f2a"} Nov 25 10:44:36 crc kubenswrapper[4702]: I1125 10:44:36.087264 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-dxlxj_fc7bcda9-5809-4852-8dd7-414ead106d61/kube-multus/2.log" Nov 25 10:44:36 crc kubenswrapper[4702]: I1125 10:44:36.087391 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-dxlxj" event={"ID":"fc7bcda9-5809-4852-8dd7-414ead106d61","Type":"ContainerStarted","Data":"b880ba1f3eecdd7d7e877944df7350ac64e7b07de179711c45c4e73c457c4ef8"} Nov 25 10:44:37 crc kubenswrapper[4702]: I1125 10:44:37.096985 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"5b3e6fcff0ccf79a95e5dab7eef5072d9d17350d273e270cec37dee86d7292d3"} Nov 25 10:44:37 crc kubenswrapper[4702]: I1125 10:44:37.097452 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"7f8b5a1b57b6034350950e430ec1aa21f5e7574e0520e1622e257b85a38acb56"} Nov 25 10:44:37 crc kubenswrapper[4702]: I1125 10:44:37.097472 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"3de8192caf331b325c978aa40a2c390d886871f2d1ae1dd53f160dcbe10f0048"} Nov 25 10:44:37 crc kubenswrapper[4702]: I1125 10:44:37.097484 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"f7438d5cf3c03ad8ac069f21290ff522a6f198982827d1ccdaf7f9080772ddd4"} Nov 25 10:44:37 crc kubenswrapper[4702]: I1125 10:44:37.097498 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"edf064a7246b6c1e97ad1305e4a5d706ea082f1e8c7b9ae7f89cc1ac243e2e85"} Nov 25 10:44:37 crc kubenswrapper[4702]: I1125 10:44:37.097512 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"c9b64293358dec1a1f17b3ab256e97b9c0ceb322b57b457036ec71120eead5b3"} Nov 25 10:44:37 crc kubenswrapper[4702]: I1125 10:44:37.414169 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a50f8b41-e2d8-4d32-9306-bdb2a753a4b6" path="/var/lib/kubelet/pods/a50f8b41-e2d8-4d32-9306-bdb2a753a4b6/volumes" Nov 25 10:44:40 crc kubenswrapper[4702]: I1125 10:44:40.119407 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"0badce7aea47149c88e051f8a0cc7eb2df073bd71394c58edd989c54e48ae0d2"} Nov 25 10:44:43 crc kubenswrapper[4702]: I1125 10:44:43.591449 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:44:43 crc kubenswrapper[4702]: I1125 10:44:43.593058 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:44:44 crc kubenswrapper[4702]: I1125 10:44:44.150739 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" event={"ID":"91f129ea-9609-4084-b37d-fdf68b3a8071","Type":"ContainerStarted","Data":"2d0c519ac8df1d64bb2caf78085f5fd3bdce835a6001352df6b0dc57deb37f4a"} Nov 25 10:44:46 crc kubenswrapper[4702]: I1125 10:44:46.162672 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:46 crc kubenswrapper[4702]: I1125 10:44:46.163061 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:46 crc kubenswrapper[4702]: I1125 10:44:46.190280 4702 
kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:46 crc kubenswrapper[4702]: I1125 10:44:46.200861 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" podStartSLOduration=11.20083609 podStartE2EDuration="11.20083609s" podCreationTimestamp="2025-11-25 10:44:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:44:46.198976638 +0000 UTC m=+783.565572337" watchObservedRunningTime="2025-11-25 10:44:46.20083609 +0000 UTC m=+783.567431799" Nov 25 10:44:47 crc kubenswrapper[4702]: I1125 10:44:47.170152 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:47 crc kubenswrapper[4702]: I1125 10:44:47.249767 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:48 crc kubenswrapper[4702]: I1125 10:44:48.218955 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-664t9" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.311871 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.313704 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.316380 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.316667 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.320338 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.381701 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.381803 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.482726 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.482805 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.482864 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.509388 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:50 crc kubenswrapper[4702]: I1125 10:44:50.637072 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:51 crc kubenswrapper[4702]: I1125 10:44:51.074672 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Nov 25 10:44:51 crc kubenswrapper[4702]: I1125 10:44:51.196104 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773","Type":"ContainerStarted","Data":"9792c3f3babe9d668e6d1deefc2ffe6193ec17d545cfad86927fdcd39b513ab6"} Nov 25 10:44:52 crc kubenswrapper[4702]: I1125 10:44:52.201734 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773","Type":"ContainerStarted","Data":"bd153d99798f34fd05ca1c995e55b0c7a265cd19bc1bbaeccae1ed24e765ae56"} Nov 25 10:44:52 crc kubenswrapper[4702]: I1125 10:44:52.214711 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-9-crc" podStartSLOduration=2.214692086 podStartE2EDuration="2.214692086s" podCreationTimestamp="2025-11-25 10:44:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:44:52.213469332 +0000 UTC m=+789.580065041" watchObservedRunningTime="2025-11-25 10:44:52.214692086 +0000 UTC m=+789.581287785" Nov 25 10:44:53 crc kubenswrapper[4702]: I1125 10:44:53.209292 4702 generic.go:334] "Generic (PLEG): container finished" podID="0f58dd96-d9dd-4ae9-96c9-d6eec42a1773" containerID="bd153d99798f34fd05ca1c995e55b0c7a265cd19bc1bbaeccae1ed24e765ae56" exitCode=0 Nov 25 10:44:53 crc kubenswrapper[4702]: I1125 10:44:53.209373 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773","Type":"ContainerDied","Data":"bd153d99798f34fd05ca1c995e55b0c7a265cd19bc1bbaeccae1ed24e765ae56"} Nov 25 10:44:54 crc kubenswrapper[4702]: I1125 10:44:54.506608 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:54 crc kubenswrapper[4702]: I1125 10:44:54.545074 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kubelet-dir\") pod \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\" (UID: \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\") " Nov 25 10:44:54 crc kubenswrapper[4702]: I1125 10:44:54.545131 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kube-api-access\") pod \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\" (UID: \"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773\") " Nov 25 10:44:54 crc kubenswrapper[4702]: I1125 10:44:54.545143 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "0f58dd96-d9dd-4ae9-96c9-d6eec42a1773" (UID: "0f58dd96-d9dd-4ae9-96c9-d6eec42a1773"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:44:54 crc kubenswrapper[4702]: I1125 10:44:54.545271 4702 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:54 crc kubenswrapper[4702]: I1125 10:44:54.551921 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0f58dd96-d9dd-4ae9-96c9-d6eec42a1773" (UID: "0f58dd96-d9dd-4ae9-96c9-d6eec42a1773"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:44:54 crc kubenswrapper[4702]: I1125 10:44:54.646380 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0f58dd96-d9dd-4ae9-96c9-d6eec42a1773-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 10:44:55 crc kubenswrapper[4702]: I1125 10:44:55.221503 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"0f58dd96-d9dd-4ae9-96c9-d6eec42a1773","Type":"ContainerDied","Data":"9792c3f3babe9d668e6d1deefc2ffe6193ec17d545cfad86927fdcd39b513ab6"} Nov 25 10:44:55 crc kubenswrapper[4702]: I1125 10:44:55.221546 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Nov 25 10:44:55 crc kubenswrapper[4702]: I1125 10:44:55.221549 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9792c3f3babe9d668e6d1deefc2ffe6193ec17d545cfad86927fdcd39b513ab6" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.707864 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 25 10:44:56 crc kubenswrapper[4702]: E1125 10:44:56.708753 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0f58dd96-d9dd-4ae9-96c9-d6eec42a1773" containerName="pruner" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.708772 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="0f58dd96-d9dd-4ae9-96c9-d6eec42a1773" containerName="pruner" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.708926 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="0f58dd96-d9dd-4ae9-96c9-d6eec42a1773" containerName="pruner" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.709537 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.711770 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.711952 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.722090 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.775014 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-var-lock\") pod \"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.775093 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kubelet-dir\") pod \"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.775171 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kube-api-access\") pod \"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.876804 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-var-lock\") pod \"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.877112 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kubelet-dir\") pod 
\"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.877324 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kube-api-access\") pod \"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.877333 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kubelet-dir\") pod \"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.876914 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-var-lock\") pod \"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:56 crc kubenswrapper[4702]: I1125 10:44:56.901342 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kube-api-access\") pod \"installer-9-crc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:57 crc kubenswrapper[4702]: I1125 10:44:57.025852 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:44:57 crc kubenswrapper[4702]: I1125 10:44:57.439239 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Nov 25 10:44:57 crc kubenswrapper[4702]: W1125 10:44:57.446816 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-pod4b8a5a80_f55f_4479_be86_0c8dfaaaa4cc.slice/crio-52ff4b1478213e9ba6c9842fffe51d0f576ec10b172d91d3c60deb4b0a1c3b81 WatchSource:0}: Error finding container 52ff4b1478213e9ba6c9842fffe51d0f576ec10b172d91d3c60deb4b0a1c3b81: Status 404 returned error can't find the container with id 52ff4b1478213e9ba6c9842fffe51d0f576ec10b172d91d3c60deb4b0a1c3b81 Nov 25 10:44:58 crc kubenswrapper[4702]: I1125 10:44:58.240514 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc","Type":"ContainerStarted","Data":"8ac0999c141d74f7d70773773a4a931570b2eecbb829aab8ee6519a00ed325df"} Nov 25 10:44:58 crc kubenswrapper[4702]: I1125 10:44:58.240892 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc","Type":"ContainerStarted","Data":"52ff4b1478213e9ba6c9842fffe51d0f576ec10b172d91d3c60deb4b0a1c3b81"} Nov 25 10:44:58 crc kubenswrapper[4702]: I1125 10:44:58.257418 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=2.257397403 podStartE2EDuration="2.257397403s" podCreationTimestamp="2025-11-25 10:44:56 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:44:58.255646704 +0000 UTC 
m=+795.622242393" watchObservedRunningTime="2025-11-25 10:44:58.257397403 +0000 UTC m=+795.623993112" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.145312 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz"] Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.146194 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.150025 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.150191 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.157637 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz"] Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.229467 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-config-volume\") pod \"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.330444 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-config-volume\") pod \"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.330545 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtlfj\" (UniqueName: \"kubernetes.io/projected/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-kube-api-access-mtlfj\") pod \"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.330578 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-secret-volume\") pod \"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.331765 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-config-volume\") pod \"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.431643 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtlfj\" (UniqueName: \"kubernetes.io/projected/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-kube-api-access-mtlfj\") pod 
\"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.431718 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-secret-volume\") pod \"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.448274 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-secret-volume\") pod \"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.449639 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtlfj\" (UniqueName: \"kubernetes.io/projected/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-kube-api-access-mtlfj\") pod \"collect-profiles-29401125-mfcsz\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.465421 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:00 crc kubenswrapper[4702]: I1125 10:45:00.662310 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz"] Nov 25 10:45:01 crc kubenswrapper[4702]: I1125 10:45:01.258704 4702 generic.go:334] "Generic (PLEG): container finished" podID="c067fbe6-2f3a-4041-9d09-6dfba1ef9d60" containerID="575de5a5687353cc12b1550d4553031056df61983bbeb02702691ed685dede83" exitCode=0 Nov 25 10:45:01 crc kubenswrapper[4702]: I1125 10:45:01.258962 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" event={"ID":"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60","Type":"ContainerDied","Data":"575de5a5687353cc12b1550d4553031056df61983bbeb02702691ed685dede83"} Nov 25 10:45:01 crc kubenswrapper[4702]: I1125 10:45:01.259268 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" event={"ID":"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60","Type":"ContainerStarted","Data":"8b04cc6b6c4697c5feeb10195d0b624c9180fd19be9b4ef03bf1aa9d85e2e1db"} Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.530776 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.662123 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mtlfj\" (UniqueName: \"kubernetes.io/projected/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-kube-api-access-mtlfj\") pod \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.662318 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-secret-volume\") pod \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.662361 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-config-volume\") pod \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\" (UID: \"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60\") " Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.664880 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-config-volume" (OuterVolumeSpecName: "config-volume") pod "c067fbe6-2f3a-4041-9d09-6dfba1ef9d60" (UID: "c067fbe6-2f3a-4041-9d09-6dfba1ef9d60"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.669713 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "c067fbe6-2f3a-4041-9d09-6dfba1ef9d60" (UID: "c067fbe6-2f3a-4041-9d09-6dfba1ef9d60"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.670492 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-kube-api-access-mtlfj" (OuterVolumeSpecName: "kube-api-access-mtlfj") pod "c067fbe6-2f3a-4041-9d09-6dfba1ef9d60" (UID: "c067fbe6-2f3a-4041-9d09-6dfba1ef9d60"). InnerVolumeSpecName "kube-api-access-mtlfj". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.765364 4702 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.765415 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mtlfj\" (UniqueName: \"kubernetes.io/projected/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-kube-api-access-mtlfj\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:02 crc kubenswrapper[4702]: I1125 10:45:02.765430 4702 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/c067fbe6-2f3a-4041-9d09-6dfba1ef9d60-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.211409 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-mn4fv"] Nov 25 10:45:03 crc kubenswrapper[4702]: E1125 10:45:03.211653 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c067fbe6-2f3a-4041-9d09-6dfba1ef9d60" containerName="collect-profiles" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.211677 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c067fbe6-2f3a-4041-9d09-6dfba1ef9d60" containerName="collect-profiles" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.211799 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c067fbe6-2f3a-4041-9d09-6dfba1ef9d60" containerName="collect-profiles" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.212305 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-mn4fv" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.216088 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-48592" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.216428 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.216579 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.224796 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-mn4fv"] Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.277093 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" event={"ID":"c067fbe6-2f3a-4041-9d09-6dfba1ef9d60","Type":"ContainerDied","Data":"8b04cc6b6c4697c5feeb10195d0b624c9180fd19be9b4ef03bf1aa9d85e2e1db"} Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.277141 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8b04cc6b6c4697c5feeb10195d0b624c9180fd19be9b4ef03bf1aa9d85e2e1db" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.277423 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401125-mfcsz" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.373686 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n5t2z\" (UniqueName: \"kubernetes.io/projected/1a354cb2-dd0f-42f8-9d2e-0871756e202d-kube-api-access-n5t2z\") pod \"mariadb-operator-index-mn4fv\" (UID: \"1a354cb2-dd0f-42f8-9d2e-0871756e202d\") " pod="openstack-operators/mariadb-operator-index-mn4fv" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.475277 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n5t2z\" (UniqueName: \"kubernetes.io/projected/1a354cb2-dd0f-42f8-9d2e-0871756e202d-kube-api-access-n5t2z\") pod \"mariadb-operator-index-mn4fv\" (UID: \"1a354cb2-dd0f-42f8-9d2e-0871756e202d\") " pod="openstack-operators/mariadb-operator-index-mn4fv" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.499460 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n5t2z\" (UniqueName: \"kubernetes.io/projected/1a354cb2-dd0f-42f8-9d2e-0871756e202d-kube-api-access-n5t2z\") pod \"mariadb-operator-index-mn4fv\" (UID: \"1a354cb2-dd0f-42f8-9d2e-0871756e202d\") " pod="openstack-operators/mariadb-operator-index-mn4fv" Nov 25 10:45:03 crc kubenswrapper[4702]: I1125 10:45:03.529643 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-mn4fv" Nov 25 10:45:04 crc kubenswrapper[4702]: I1125 10:45:04.037616 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-mn4fv"] Nov 25 10:45:04 crc kubenswrapper[4702]: I1125 10:45:04.058138 4702 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 10:45:04 crc kubenswrapper[4702]: I1125 10:45:04.286309 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-mn4fv" event={"ID":"1a354cb2-dd0f-42f8-9d2e-0871756e202d","Type":"ContainerStarted","Data":"e5437ddcedbc7392a55fb70a3c32a814a16ac0aa473c6b3635d4aa82b014fd72"} Nov 25 10:45:05 crc kubenswrapper[4702]: I1125 10:45:05.979926 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-mn4fv"] Nov 25 10:45:06 crc kubenswrapper[4702]: I1125 10:45:06.591679 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-vdhbr"] Nov 25 10:45:06 crc kubenswrapper[4702]: I1125 10:45:06.592923 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:06 crc kubenswrapper[4702]: I1125 10:45:06.602001 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-vdhbr"] Nov 25 10:45:06 crc kubenswrapper[4702]: I1125 10:45:06.723634 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hg6xn\" (UniqueName: \"kubernetes.io/projected/57834186-3a16-48fa-a5b5-fc12a25825af-kube-api-access-hg6xn\") pod \"mariadb-operator-index-vdhbr\" (UID: \"57834186-3a16-48fa-a5b5-fc12a25825af\") " pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:06 crc kubenswrapper[4702]: I1125 10:45:06.825056 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hg6xn\" (UniqueName: \"kubernetes.io/projected/57834186-3a16-48fa-a5b5-fc12a25825af-kube-api-access-hg6xn\") pod \"mariadb-operator-index-vdhbr\" (UID: \"57834186-3a16-48fa-a5b5-fc12a25825af\") " pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:06 crc kubenswrapper[4702]: I1125 10:45:06.845971 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hg6xn\" (UniqueName: \"kubernetes.io/projected/57834186-3a16-48fa-a5b5-fc12a25825af-kube-api-access-hg6xn\") pod \"mariadb-operator-index-vdhbr\" (UID: \"57834186-3a16-48fa-a5b5-fc12a25825af\") " pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:06 crc kubenswrapper[4702]: I1125 10:45:06.916218 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:07 crc kubenswrapper[4702]: I1125 10:45:07.310684 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-mn4fv" event={"ID":"1a354cb2-dd0f-42f8-9d2e-0871756e202d","Type":"ContainerStarted","Data":"e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1"} Nov 25 10:45:07 crc kubenswrapper[4702]: I1125 10:45:07.311182 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-mn4fv" podUID="1a354cb2-dd0f-42f8-9d2e-0871756e202d" containerName="registry-server" containerID="cri-o://e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1" gracePeriod=2 Nov 25 10:45:07 crc kubenswrapper[4702]: I1125 10:45:07.333714 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-mn4fv" podStartSLOduration=1.480312683 podStartE2EDuration="4.333688135s" podCreationTimestamp="2025-11-25 10:45:03 +0000 UTC" firstStartedPulling="2025-11-25 10:45:04.057799056 +0000 UTC m=+801.424394745" lastFinishedPulling="2025-11-25 10:45:06.911174508 +0000 UTC m=+804.277770197" observedRunningTime="2025-11-25 10:45:07.328275993 +0000 UTC m=+804.694871692" watchObservedRunningTime="2025-11-25 10:45:07.333688135 +0000 UTC m=+804.700283834" Nov 25 10:45:07 crc kubenswrapper[4702]: I1125 10:45:07.352493 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-vdhbr"] Nov 25 10:45:07 crc kubenswrapper[4702]: W1125 10:45:07.430613 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57834186_3a16_48fa_a5b5_fc12a25825af.slice/crio-b539766b96c30af7aabaff41faad48baa9e2b95ab4b72040e90b62617ec5e25c WatchSource:0}: Error finding container 
b539766b96c30af7aabaff41faad48baa9e2b95ab4b72040e90b62617ec5e25c: Status 404 returned error can't find the container with id b539766b96c30af7aabaff41faad48baa9e2b95ab4b72040e90b62617ec5e25c Nov 25 10:45:07 crc kubenswrapper[4702]: I1125 10:45:07.662670 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-mn4fv" Nov 25 10:45:07 crc kubenswrapper[4702]: I1125 10:45:07.838497 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n5t2z\" (UniqueName: \"kubernetes.io/projected/1a354cb2-dd0f-42f8-9d2e-0871756e202d-kube-api-access-n5t2z\") pod \"1a354cb2-dd0f-42f8-9d2e-0871756e202d\" (UID: \"1a354cb2-dd0f-42f8-9d2e-0871756e202d\") " Nov 25 10:45:07 crc kubenswrapper[4702]: I1125 10:45:07.844645 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a354cb2-dd0f-42f8-9d2e-0871756e202d-kube-api-access-n5t2z" (OuterVolumeSpecName: "kube-api-access-n5t2z") pod "1a354cb2-dd0f-42f8-9d2e-0871756e202d" (UID: "1a354cb2-dd0f-42f8-9d2e-0871756e202d"). InnerVolumeSpecName "kube-api-access-n5t2z". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:45:07 crc kubenswrapper[4702]: I1125 10:45:07.940332 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n5t2z\" (UniqueName: \"kubernetes.io/projected/1a354cb2-dd0f-42f8-9d2e-0871756e202d-kube-api-access-n5t2z\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.318584 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-vdhbr" event={"ID":"57834186-3a16-48fa-a5b5-fc12a25825af","Type":"ContainerStarted","Data":"75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf"} Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.318956 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-vdhbr" event={"ID":"57834186-3a16-48fa-a5b5-fc12a25825af","Type":"ContainerStarted","Data":"b539766b96c30af7aabaff41faad48baa9e2b95ab4b72040e90b62617ec5e25c"} Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.322619 4702 generic.go:334] "Generic (PLEG): container finished" podID="1a354cb2-dd0f-42f8-9d2e-0871756e202d" containerID="e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1" exitCode=0 Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.322693 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-mn4fv" event={"ID":"1a354cb2-dd0f-42f8-9d2e-0871756e202d","Type":"ContainerDied","Data":"e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1"} Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.322733 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-mn4fv" event={"ID":"1a354cb2-dd0f-42f8-9d2e-0871756e202d","Type":"ContainerDied","Data":"e5437ddcedbc7392a55fb70a3c32a814a16ac0aa473c6b3635d4aa82b014fd72"} Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.322755 4702 scope.go:117] "RemoveContainer" containerID="e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1" Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.322937 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-mn4fv" Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.356018 4702 scope.go:117] "RemoveContainer" containerID="e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1" Nov 25 10:45:08 crc kubenswrapper[4702]: E1125 10:45:08.357696 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1\": container with ID starting with e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1 not found: ID does not exist" containerID="e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1" Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.357739 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1"} err="failed to get container status \"e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1\": rpc error: code = NotFound desc = could not find container \"e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1\": container with ID starting with e05f00c5ecef33393740461cf464afaedd7ea346d386a5326fc0c0981026e1e1 not found: ID does not exist" Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.372172 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-vdhbr" podStartSLOduration=1.939219668 podStartE2EDuration="2.372148268s" podCreationTimestamp="2025-11-25 10:45:06 +0000 UTC" firstStartedPulling="2025-11-25 10:45:07.435299112 +0000 UTC m=+804.801894801" lastFinishedPulling="2025-11-25 10:45:07.868227712 +0000 UTC m=+805.234823401" observedRunningTime="2025-11-25 10:45:08.348589276 +0000 UTC m=+805.715184975" watchObservedRunningTime="2025-11-25 10:45:08.372148268 +0000 UTC m=+805.738743987" Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.372560 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-mn4fv"] Nov 25 10:45:08 crc kubenswrapper[4702]: I1125 10:45:08.376785 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-mn4fv"] Nov 25 10:45:09 crc kubenswrapper[4702]: I1125 10:45:09.411982 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a354cb2-dd0f-42f8-9d2e-0871756e202d" path="/var/lib/kubelet/pods/1a354cb2-dd0f-42f8-9d2e-0871756e202d/volumes" Nov 25 10:45:13 crc kubenswrapper[4702]: I1125 10:45:13.590830 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:45:13 crc kubenswrapper[4702]: I1125 10:45:13.591132 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:45:16 crc kubenswrapper[4702]: I1125 10:45:16.917096 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:16 crc kubenswrapper[4702]: I1125 10:45:16.917432 4702 
kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:16 crc kubenswrapper[4702]: I1125 10:45:16.944418 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:17 crc kubenswrapper[4702]: I1125 10:45:17.397927 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.620126 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw"] Nov 25 10:45:19 crc kubenswrapper[4702]: E1125 10:45:19.620652 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a354cb2-dd0f-42f8-9d2e-0871756e202d" containerName="registry-server" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.620670 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a354cb2-dd0f-42f8-9d2e-0871756e202d" containerName="registry-server" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.620782 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a354cb2-dd0f-42f8-9d2e-0871756e202d" containerName="registry-server" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.623630 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.630515 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-wkv7r" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.642898 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw"] Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.713727 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-bundle\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.713836 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xxbxz\" (UniqueName: \"kubernetes.io/projected/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-kube-api-access-xxbxz\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.713950 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-util\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.814774 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xxbxz\" 
(UniqueName: \"kubernetes.io/projected/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-kube-api-access-xxbxz\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.814873 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-util\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.814926 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-bundle\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.815468 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-util\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.815503 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-bundle\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.835789 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xxbxz\" (UniqueName: \"kubernetes.io/projected/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-kube-api-access-xxbxz\") pod \"c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:19 crc kubenswrapper[4702]: I1125 10:45:19.960844 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:20 crc kubenswrapper[4702]: I1125 10:45:20.362102 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw"] Nov 25 10:45:20 crc kubenswrapper[4702]: W1125 10:45:20.366721 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod91dec7ac_37c4_4a24_bd96_3fdcba2e5980.slice/crio-071399e2c85848a7d6d1e99d919eccf1490f5e48e2b60606b9b4f2ee1906a03a WatchSource:0}: Error finding container 071399e2c85848a7d6d1e99d919eccf1490f5e48e2b60606b9b4f2ee1906a03a: Status 404 returned error can't find the container with id 071399e2c85848a7d6d1e99d919eccf1490f5e48e2b60606b9b4f2ee1906a03a Nov 25 10:45:20 crc kubenswrapper[4702]: I1125 10:45:20.394315 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" event={"ID":"91dec7ac-37c4-4a24-bd96-3fdcba2e5980","Type":"ContainerStarted","Data":"071399e2c85848a7d6d1e99d919eccf1490f5e48e2b60606b9b4f2ee1906a03a"} Nov 25 10:45:21 crc kubenswrapper[4702]: I1125 10:45:21.402197 4702 generic.go:334] "Generic (PLEG): container finished" podID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerID="15bf19a3beded83cb15b577f6b4c40c036ef0778894b3eb712fb9eba47ce529b" exitCode=0 Nov 25 10:45:21 crc kubenswrapper[4702]: I1125 10:45:21.410380 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" event={"ID":"91dec7ac-37c4-4a24-bd96-3fdcba2e5980","Type":"ContainerDied","Data":"15bf19a3beded83cb15b577f6b4c40c036ef0778894b3eb712fb9eba47ce529b"} Nov 25 10:45:23 crc kubenswrapper[4702]: I1125 10:45:23.416829 4702 generic.go:334] "Generic (PLEG): container finished" podID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerID="681bfa4695b0ef11796f8b067be07d8e3ff16c7aed72e61510586c3eb3631130" exitCode=0 Nov 25 10:45:23 crc kubenswrapper[4702]: I1125 10:45:23.416991 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" event={"ID":"91dec7ac-37c4-4a24-bd96-3fdcba2e5980","Type":"ContainerDied","Data":"681bfa4695b0ef11796f8b067be07d8e3ff16c7aed72e61510586c3eb3631130"} Nov 25 10:45:24 crc kubenswrapper[4702]: I1125 10:45:24.424375 4702 generic.go:334] "Generic (PLEG): container finished" podID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerID="8de91ee7f27bf7330c8b9346b8ab5aa93d77de3bf601ffd7c126715e0adfa1fa" exitCode=0 Nov 25 10:45:24 crc kubenswrapper[4702]: I1125 10:45:24.424460 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" event={"ID":"91dec7ac-37c4-4a24-bd96-3fdcba2e5980","Type":"ContainerDied","Data":"8de91ee7f27bf7330c8b9346b8ab5aa93d77de3bf601ffd7c126715e0adfa1fa"} Nov 25 10:45:25 crc kubenswrapper[4702]: I1125 10:45:25.658534 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:25 crc kubenswrapper[4702]: I1125 10:45:25.790176 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-util\") pod \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " Nov 25 10:45:25 crc kubenswrapper[4702]: I1125 10:45:25.790250 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xxbxz\" (UniqueName: \"kubernetes.io/projected/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-kube-api-access-xxbxz\") pod \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " Nov 25 10:45:25 crc kubenswrapper[4702]: I1125 10:45:25.790311 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-bundle\") pod \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\" (UID: \"91dec7ac-37c4-4a24-bd96-3fdcba2e5980\") " Nov 25 10:45:25 crc kubenswrapper[4702]: I1125 10:45:25.791354 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-bundle" (OuterVolumeSpecName: "bundle") pod "91dec7ac-37c4-4a24-bd96-3fdcba2e5980" (UID: "91dec7ac-37c4-4a24-bd96-3fdcba2e5980"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:45:25 crc kubenswrapper[4702]: I1125 10:45:25.797368 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-kube-api-access-xxbxz" (OuterVolumeSpecName: "kube-api-access-xxbxz") pod "91dec7ac-37c4-4a24-bd96-3fdcba2e5980" (UID: "91dec7ac-37c4-4a24-bd96-3fdcba2e5980"). InnerVolumeSpecName "kube-api-access-xxbxz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:45:25 crc kubenswrapper[4702]: I1125 10:45:25.891483 4702 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:25 crc kubenswrapper[4702]: I1125 10:45:25.891514 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xxbxz\" (UniqueName: \"kubernetes.io/projected/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-kube-api-access-xxbxz\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:26 crc kubenswrapper[4702]: I1125 10:45:26.218087 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-util" (OuterVolumeSpecName: "util") pod "91dec7ac-37c4-4a24-bd96-3fdcba2e5980" (UID: "91dec7ac-37c4-4a24-bd96-3fdcba2e5980"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:45:26 crc kubenswrapper[4702]: I1125 10:45:26.296362 4702 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/91dec7ac-37c4-4a24-bd96-3fdcba2e5980-util\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:26 crc kubenswrapper[4702]: I1125 10:45:26.436470 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" event={"ID":"91dec7ac-37c4-4a24-bd96-3fdcba2e5980","Type":"ContainerDied","Data":"071399e2c85848a7d6d1e99d919eccf1490f5e48e2b60606b9b4f2ee1906a03a"} Nov 25 10:45:26 crc kubenswrapper[4702]: I1125 10:45:26.436513 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="071399e2c85848a7d6d1e99d919eccf1490f5e48e2b60606b9b4f2ee1906a03a" Nov 25 10:45:26 crc kubenswrapper[4702]: I1125 10:45:26.436599 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.644029 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"] Nov 25 10:45:32 crc kubenswrapper[4702]: E1125 10:45:32.644654 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerName="util" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.644673 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerName="util" Nov 25 10:45:32 crc kubenswrapper[4702]: E1125 10:45:32.644690 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerName="pull" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.644698 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerName="pull" Nov 25 10:45:32 crc kubenswrapper[4702]: E1125 10:45:32.644716 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerName="extract" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.644724 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerName="extract" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.644852 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" containerName="extract" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.645361 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.647796 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"] Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.648296 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-76znq" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.649520 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.649568 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.685308 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xznc5\" (UniqueName: \"kubernetes.io/projected/c7e80c7d-91e3-4953-bf91-d35441e38743-kube-api-access-xznc5\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.685369 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-webhook-cert\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.685410 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-apiservice-cert\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.786624 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-webhook-cert\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.786709 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-apiservice-cert\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.786796 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xznc5\" (UniqueName: \"kubernetes.io/projected/c7e80c7d-91e3-4953-bf91-d35441e38743-kube-api-access-xznc5\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") 
" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.794865 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-apiservice-cert\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.795117 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-webhook-cert\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.811411 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xznc5\" (UniqueName: \"kubernetes.io/projected/c7e80c7d-91e3-4953-bf91-d35441e38743-kube-api-access-xznc5\") pod \"mariadb-operator-controller-manager-656f99cfb7-jldk8\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:32 crc kubenswrapper[4702]: I1125 10:45:32.981060 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:33 crc kubenswrapper[4702]: I1125 10:45:33.423653 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"] Nov 25 10:45:33 crc kubenswrapper[4702]: I1125 10:45:33.472636 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerStarted","Data":"b8030e119264ab27956a9219011c8bc132d1eddedfe9ba52f9303b1ee1f28bbb"} Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.494508 4702 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.495247 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e" gracePeriod=15 Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.495596 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72" gracePeriod=15 Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.495684 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a" gracePeriod=15 Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.495725 4702 
kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621" gracePeriod=15 Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.495791 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4" gracePeriod=15 Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.506806 4702 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Nov 25 10:45:35 crc kubenswrapper[4702]: E1125 10:45:35.507400 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507415 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: E1125 10:45:35.507450 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507457 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 25 10:45:35 crc kubenswrapper[4702]: E1125 10:45:35.507466 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507474 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 25 10:45:35 crc kubenswrapper[4702]: E1125 10:45:35.507482 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507488 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 25 10:45:35 crc kubenswrapper[4702]: E1125 10:45:35.507521 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507528 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Nov 25 10:45:35 crc kubenswrapper[4702]: E1125 10:45:35.507536 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507541 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: E1125 10:45:35.507548 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 
10:45:35.507554 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: E1125 10:45:35.507665 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507673 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507815 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507827 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507834 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507844 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507853 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507862 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.507891 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.509287 4702 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.510225 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.514454 4702 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="f4b27818a5e8e43d0dc095d08835c792" podUID="71bb4a3aecc4ba5b26c4b7318770ce13" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.525590 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.525641 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.525679 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.525718 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.525738 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.525778 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.525798 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.525813 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: 
\"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627353 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627397 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627417 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627451 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627468 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627482 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627509 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627525 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627585 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " 
pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627619 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627674 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627695 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627715 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627735 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627753 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:35 crc kubenswrapper[4702]: I1125 10:45:35.627774 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.492159 4702 generic.go:334] "Generic (PLEG): container finished" podID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" containerID="8ac0999c141d74f7d70773773a4a931570b2eecbb829aab8ee6519a00ed325df" exitCode=0 Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.492258 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc","Type":"ContainerDied","Data":"8ac0999c141d74f7d70773773a4a931570b2eecbb829aab8ee6519a00ed325df"} Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.493010 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.495432 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.496689 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.497394 4702 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72" exitCode=0 Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.497456 4702 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a" exitCode=0 Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.497471 4702 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621" exitCode=0 Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.497483 4702 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4" exitCode=2 Nov 25 10:45:36 crc kubenswrapper[4702]: I1125 10:45:36.497541 4702 scope.go:117] "RemoveContainer" containerID="499e7130aa4bbd1a089d5e94bc4e407cdca421e5316941df1c1942d65d637e86" Nov 25 10:45:37 crc kubenswrapper[4702]: I1125 10:45:37.506287 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 25 10:45:37 crc kubenswrapper[4702]: E1125 10:45:37.556580 4702 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/events\": dial tcp 38.102.83.46:6443: connect: connection refused" event="&Event{ObjectMeta:{mariadb-operator-controller-manager-656f99cfb7-jldk8.187b3a1437d028b4 openstack-operators 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openstack-operators,Name:mariadb-operator-controller-manager-656f99cfb7-jldk8,UID:c7e80c7d-91e3-4953-bf91-d35441e38743,APIVersion:v1,ResourceVersion:31487,FieldPath:spec.containers{manager},},Reason:Pulled,Message:Successfully pulled image \"quay.io/openstack-k8s-operators/mariadb-operator@sha256:888edf6f432e52eaa5fc3caeae616fe38a3302b006bbba0e38885b2beba9f0f2\" in 4.123s (4.123s including waiting). Image size: 189259986 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-25 10:45:37.555826868 +0000 UTC m=+834.922422557,LastTimestamp:2025-11-25 10:45:37.555826868 +0000 UTC m=+834.922422557,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 25 10:45:37 crc kubenswrapper[4702]: I1125 10:45:37.976751 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:45:37 crc kubenswrapper[4702]: I1125 10:45:37.978048 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.058195 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kubelet-dir\") pod \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.058293 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" (UID: "4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.058375 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kube-api-access\") pod \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.059097 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-var-lock\") pod \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\" (UID: \"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc\") " Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.059289 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-var-lock" (OuterVolumeSpecName: "var-lock") pod "4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" (UID: "4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.059417 4702 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kubelet-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.066540 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" (UID: "4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc"). InnerVolumeSpecName "kube-api-access". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.160295 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-kube-api-access\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.160338 4702 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc-var-lock\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.378562 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.379509 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.381080 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.381705 4702 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.520000 4702 generic.go:334] "Generic (PLEG): container finished" podID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerID="e12ba024d20e9cd665bc03674f99699dfe21af12a7725f433d65f15b8440db71" exitCode=1 Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.520065 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerDied","Data":"e12ba024d20e9cd665bc03674f99699dfe21af12a7725f433d65f15b8440db71"} Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.520457 4702 scope.go:117] "RemoveContainer" containerID="e12ba024d20e9cd665bc03674f99699dfe21af12a7725f433d65f15b8440db71" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.521084 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.521353 4702 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.521613 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" 
pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.526044 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.527000 4702 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e" exitCode=0 Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.527054 4702 scope.go:117] "RemoveContainer" containerID="ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.527136 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.528682 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc","Type":"ContainerDied","Data":"52ff4b1478213e9ba6c9842fffe51d0f576ec10b172d91d3c60deb4b0a1c3b81"} Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.528713 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="52ff4b1478213e9ba6c9842fffe51d0f576ec10b172d91d3c60deb4b0a1c3b81" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.528781 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.546227 4702 scope.go:117] "RemoveContainer" containerID="2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.558057 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.558646 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.560550 4702 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.567416 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.567653 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.567494 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.567715 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.567799 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.568011 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.568337 4702 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.568366 4702 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.568378 4702 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.573991 4702 scope.go:117] "RemoveContainer" containerID="19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.591423 4702 scope.go:117] "RemoveContainer" containerID="402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.614081 4702 scope.go:117] "RemoveContainer" containerID="8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.636034 4702 scope.go:117] "RemoveContainer" containerID="983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.660818 4702 scope.go:117] "RemoveContainer" containerID="ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72" Nov 25 10:45:38 crc kubenswrapper[4702]: E1125 10:45:38.665815 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\": container with ID starting with ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72 not found: ID does not exist" containerID="ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.665856 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72"} err="failed to get container status \"ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\": rpc error: code = NotFound desc = could not find container \"ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72\": container with ID starting with ae54dfb21885240c1dc6019af685a2d5eb59eb0f4ef1ba857b765ccda49e9f72 not found: ID does not exist" Nov 25 10:45:38 
crc kubenswrapper[4702]: I1125 10:45:38.665888 4702 scope.go:117] "RemoveContainer" containerID="2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a" Nov 25 10:45:38 crc kubenswrapper[4702]: E1125 10:45:38.670822 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\": container with ID starting with 2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a not found: ID does not exist" containerID="2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.670961 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a"} err="failed to get container status \"2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\": rpc error: code = NotFound desc = could not find container \"2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a\": container with ID starting with 2d047ad058ea26e2ed9cabda2e319d6f9a35da1c8ef08775127b101d9f8a5c5a not found: ID does not exist" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.671057 4702 scope.go:117] "RemoveContainer" containerID="19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621" Nov 25 10:45:38 crc kubenswrapper[4702]: E1125 10:45:38.671578 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\": container with ID starting with 19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621 not found: ID does not exist" containerID="19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.671690 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621"} err="failed to get container status \"19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\": rpc error: code = NotFound desc = could not find container \"19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621\": container with ID starting with 19d4217ecd6e396f5fcd9e3cdd0a5c6132d86d2f56b38e011c38b2a71b1c1621 not found: ID does not exist" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.671855 4702 scope.go:117] "RemoveContainer" containerID="402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4" Nov 25 10:45:38 crc kubenswrapper[4702]: E1125 10:45:38.672194 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\": container with ID starting with 402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4 not found: ID does not exist" containerID="402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.672234 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4"} err="failed to get container status \"402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\": rpc error: code = NotFound desc = could not find container 
\"402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4\": container with ID starting with 402ca07beee3478a4988a0bef0c5caca636eb25f739f1dd22978f73ec57b8bb4 not found: ID does not exist" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.672261 4702 scope.go:117] "RemoveContainer" containerID="8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e" Nov 25 10:45:38 crc kubenswrapper[4702]: E1125 10:45:38.673422 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\": container with ID starting with 8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e not found: ID does not exist" containerID="8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.673463 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e"} err="failed to get container status \"8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\": rpc error: code = NotFound desc = could not find container \"8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e\": container with ID starting with 8b1bf45ea1203cca31e06b0736c49fab459030475ab46f10b6793fbb70f9fa4e not found: ID does not exist" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.673487 4702 scope.go:117] "RemoveContainer" containerID="983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade" Nov 25 10:45:38 crc kubenswrapper[4702]: E1125 10:45:38.673745 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\": container with ID starting with 983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade not found: ID does not exist" containerID="983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.673780 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade"} err="failed to get container status \"983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\": rpc error: code = NotFound desc = could not find container \"983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade\": container with ID starting with 983e4a0cf7fa42dbb1303c124a46c91899328016fffedcfb6ae95fa4adc9bade not found: ID does not exist" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.848457 4702 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 10:45:38.848805 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:38 crc kubenswrapper[4702]: I1125 
10:45:38.849207 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:39 crc kubenswrapper[4702]: I1125 10:45:39.410134 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Nov 25 10:45:39 crc kubenswrapper[4702]: I1125 10:45:39.536736 4702 generic.go:334] "Generic (PLEG): container finished" podID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerID="8a9a089b6bf2cada9b6b48b1df83f51eb4c91bccb4e100b0e27121ce12527273" exitCode=1 Nov 25 10:45:39 crc kubenswrapper[4702]: I1125 10:45:39.536774 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerDied","Data":"8a9a089b6bf2cada9b6b48b1df83f51eb4c91bccb4e100b0e27121ce12527273"} Nov 25 10:45:39 crc kubenswrapper[4702]: I1125 10:45:39.536810 4702 scope.go:117] "RemoveContainer" containerID="e12ba024d20e9cd665bc03674f99699dfe21af12a7725f433d65f15b8440db71" Nov 25 10:45:39 crc kubenswrapper[4702]: I1125 10:45:39.537417 4702 scope.go:117] "RemoveContainer" containerID="8a9a089b6bf2cada9b6b48b1df83f51eb4c91bccb4e100b0e27121ce12527273" Nov 25 10:45:39 crc kubenswrapper[4702]: I1125 10:45:39.537507 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:39 crc kubenswrapper[4702]: E1125 10:45:39.537710 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:45:39 crc kubenswrapper[4702]: I1125 10:45:39.537776 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:40 crc kubenswrapper[4702]: E1125 10:45:40.533489 4702 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.46:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:40 crc kubenswrapper[4702]: I1125 10:45:40.535048 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:40 crc kubenswrapper[4702]: I1125 10:45:40.545188 4702 scope.go:117] "RemoveContainer" containerID="8a9a089b6bf2cada9b6b48b1df83f51eb4c91bccb4e100b0e27121ce12527273" Nov 25 10:45:40 crc kubenswrapper[4702]: E1125 10:45:40.545361 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:45:40 crc kubenswrapper[4702]: I1125 10:45:40.545431 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:40 crc kubenswrapper[4702]: I1125 10:45:40.546012 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:41 crc kubenswrapper[4702]: I1125 10:45:41.551083 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7"} Nov 25 10:45:41 crc kubenswrapper[4702]: I1125 10:45:41.551461 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"cd10d115714b3d26476105f031bdf97529573d1e56405a9e1cfa037933a6f531"} Nov 25 10:45:41 crc kubenswrapper[4702]: E1125 10:45:41.552128 4702 kubelet.go:1929] "Failed creating a mirror pod for" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods\": dial tcp 38.102.83.46:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:45:41 crc kubenswrapper[4702]: I1125 10:45:41.552130 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:41 crc kubenswrapper[4702]: I1125 10:45:41.552633 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:42 crc kubenswrapper[4702]: I1125 10:45:42.981611 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:42 crc kubenswrapper[4702]: I1125 10:45:42.982566 4702 scope.go:117] "RemoveContainer" containerID="8a9a089b6bf2cada9b6b48b1df83f51eb4c91bccb4e100b0e27121ce12527273" Nov 25 10:45:42 crc kubenswrapper[4702]: E1125 10:45:42.982815 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 10s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:45:43 crc kubenswrapper[4702]: I1125 10:45:43.406033 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:43 crc kubenswrapper[4702]: I1125 10:45:43.406361 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:43 crc kubenswrapper[4702]: I1125 10:45:43.591141 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:45:43 crc kubenswrapper[4702]: I1125 10:45:43.591230 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:45:43 crc kubenswrapper[4702]: I1125 10:45:43.591295 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:45:43 crc kubenswrapper[4702]: I1125 10:45:43.592182 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7918e86b322b2cb7cfa46c8cc2dfa0c27b9015c392a35a3637cb12006c4d3205"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 10:45:43 crc kubenswrapper[4702]: I1125 10:45:43.592259 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://7918e86b322b2cb7cfa46c8cc2dfa0c27b9015c392a35a3637cb12006c4d3205" gracePeriod=600 Nov 25 10:45:44 crc kubenswrapper[4702]: I1125 10:45:44.568551 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" 
containerID="7918e86b322b2cb7cfa46c8cc2dfa0c27b9015c392a35a3637cb12006c4d3205" exitCode=0 Nov 25 10:45:44 crc kubenswrapper[4702]: I1125 10:45:44.568640 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"7918e86b322b2cb7cfa46c8cc2dfa0c27b9015c392a35a3637cb12006c4d3205"} Nov 25 10:45:44 crc kubenswrapper[4702]: I1125 10:45:44.568963 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"3dfc5653761b32b3ec56bc025998710cbd6ef0729baccffe43614c093e896dd9"} Nov 25 10:45:44 crc kubenswrapper[4702]: I1125 10:45:44.568990 4702 scope.go:117] "RemoveContainer" containerID="ec26713a85198746880277f7b0b371a03ca59293010ad77d43053fb85009ce0f" Nov 25 10:45:44 crc kubenswrapper[4702]: I1125 10:45:44.569785 4702 status_manager.go:851] "Failed to get status for pod" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-g5m5h\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:44 crc kubenswrapper[4702]: I1125 10:45:44.570061 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:44 crc kubenswrapper[4702]: I1125 10:45:44.570523 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:45 crc kubenswrapper[4702]: E1125 10:45:45.340976 4702 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:45 crc kubenswrapper[4702]: E1125 10:45:45.341708 4702 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:45 crc kubenswrapper[4702]: E1125 10:45:45.342201 4702 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:45 crc kubenswrapper[4702]: E1125 10:45:45.342568 4702 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:45 crc kubenswrapper[4702]: E1125 10:45:45.343349 4702 controller.go:195] "Failed to update lease" err="Put 
\"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:45 crc kubenswrapper[4702]: I1125 10:45:45.343393 4702 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Nov 25 10:45:45 crc kubenswrapper[4702]: E1125 10:45:45.343673 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="200ms" Nov 25 10:45:45 crc kubenswrapper[4702]: E1125 10:45:45.544819 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="400ms" Nov 25 10:45:45 crc kubenswrapper[4702]: E1125 10:45:45.945781 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="800ms" Nov 25 10:45:46 crc kubenswrapper[4702]: E1125 10:45:46.285644 4702 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/events\": dial tcp 38.102.83.46:6443: connect: connection refused" event="&Event{ObjectMeta:{mariadb-operator-controller-manager-656f99cfb7-jldk8.187b3a1437d028b4 openstack-operators 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openstack-operators,Name:mariadb-operator-controller-manager-656f99cfb7-jldk8,UID:c7e80c7d-91e3-4953-bf91-d35441e38743,APIVersion:v1,ResourceVersion:31487,FieldPath:spec.containers{manager},},Reason:Pulled,Message:Successfully pulled image \"quay.io/openstack-k8s-operators/mariadb-operator@sha256:888edf6f432e52eaa5fc3caeae616fe38a3302b006bbba0e38885b2beba9f0f2\" in 4.123s (4.123s including waiting). Image size: 189259986 bytes.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2025-11-25 10:45:37.555826868 +0000 UTC m=+834.922422557,LastTimestamp:2025-11-25 10:45:37.555826868 +0000 UTC m=+834.922422557,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Nov 25 10:45:46 crc kubenswrapper[4702]: E1125 10:45:46.747343 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="1.6s" Nov 25 10:45:48 crc kubenswrapper[4702]: E1125 10:45:48.348626 4702 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.46:6443: connect: connection refused" interval="3.2s" Nov 25 10:45:49 crc kubenswrapper[4702]: I1125 10:45:49.402236 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:49 crc kubenswrapper[4702]: I1125 10:45:49.406394 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:49 crc kubenswrapper[4702]: I1125 10:45:49.406738 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:49 crc kubenswrapper[4702]: I1125 10:45:49.407310 4702 status_manager.go:851] "Failed to get status for pod" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-g5m5h\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:49 crc kubenswrapper[4702]: I1125 10:45:49.416288 4702 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:49 crc kubenswrapper[4702]: I1125 10:45:49.416335 4702 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:49 crc kubenswrapper[4702]: E1125 10:45:49.416737 4702 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:49 crc kubenswrapper[4702]: I1125 10:45:49.417347 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:49 crc kubenswrapper[4702]: I1125 10:45:49.605274 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"2c504e854b0a820ecafe91b6167cc71f4256817c3538b39dc4ca09aa8bea0b35"} Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.613816 4702 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="1980cd4c5016f67bfa88ef5a7ad12b099c79a520b4d34f40727bb5877b52d669" exitCode=0 Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.613941 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"1980cd4c5016f67bfa88ef5a7ad12b099c79a520b4d34f40727bb5877b52d669"} Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.614164 4702 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.614185 4702 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:50 crc kubenswrapper[4702]: E1125 10:45:50.614626 4702 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.614640 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.615112 4702 status_manager.go:851] "Failed to get status for pod" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-g5m5h\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.615559 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.617349 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.617398 4702 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3" exitCode=1 Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.617427 4702 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3"} Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.617952 4702 scope.go:117] "RemoveContainer" containerID="f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.618192 4702 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.618593 4702 status_manager.go:851] "Failed to get status for pod" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openstack-operators/pods/mariadb-operator-controller-manager-656f99cfb7-jldk8\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.619049 4702 status_manager.go:851] "Failed to get status for pod" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:50 crc kubenswrapper[4702]: I1125 10:45:50.619461 4702 status_manager.go:851] "Failed to get status for pod" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-machine-config-operator/pods/machine-config-daemon-g5m5h\": dial tcp 38.102.83.46:6443: connect: connection refused" Nov 25 10:45:51 crc kubenswrapper[4702]: I1125 10:45:51.260802 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:45:51 crc kubenswrapper[4702]: I1125 10:45:51.633875 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"b8bb449b7d7adb805bcfe6fb44966eb65cac20158ba2d448da2d687029a944ff"} Nov 25 10:45:51 crc kubenswrapper[4702]: I1125 10:45:51.633932 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c0304a472e0c561a8f339d6c29d6a3eee2f5364be36cdf9aee129302354a7b1c"} Nov 25 10:45:51 crc kubenswrapper[4702]: I1125 10:45:51.633943 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"05a0fdfb7a9f563291b5aecc6f76edbc5779bd67e4acf914ab9a97bcf1854b37"} Nov 25 10:45:51 crc kubenswrapper[4702]: I1125 10:45:51.633958 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" 
event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"8cce877ccfeeafb6eb5620edb4f5b6317ddfc06020731176fac274d4bdabf1fc"} Nov 25 10:45:51 crc kubenswrapper[4702]: I1125 10:45:51.637052 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 25 10:45:51 crc kubenswrapper[4702]: I1125 10:45:51.637140 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"f084fa5d72f0dc33cce288046f2bbfc26008f4583480c4bb92b283a84599e0e7"} Nov 25 10:45:52 crc kubenswrapper[4702]: I1125 10:45:52.647304 4702 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:52 crc kubenswrapper[4702]: I1125 10:45:52.647650 4702 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:52 crc kubenswrapper[4702]: I1125 10:45:52.647524 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"10ee1a65f0140b55eb1d3ddf87593cb763b452bc03211b767322610480a69959"} Nov 25 10:45:52 crc kubenswrapper[4702]: I1125 10:45:52.647763 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:52 crc kubenswrapper[4702]: I1125 10:45:52.982421 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:45:52 crc kubenswrapper[4702]: I1125 10:45:52.983008 4702 scope.go:117] "RemoveContainer" containerID="8a9a089b6bf2cada9b6b48b1df83f51eb4c91bccb4e100b0e27121ce12527273" Nov 25 10:45:53 crc kubenswrapper[4702]: I1125 10:45:53.652981 4702 generic.go:334] "Generic (PLEG): container finished" podID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerID="39543bdaf755be3b942ad694a42d4203bf9d4cfc1c4e666e12098877b51f0a08" exitCode=1 Nov 25 10:45:53 crc kubenswrapper[4702]: I1125 10:45:53.653068 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerDied","Data":"39543bdaf755be3b942ad694a42d4203bf9d4cfc1c4e666e12098877b51f0a08"} Nov 25 10:45:53 crc kubenswrapper[4702]: I1125 10:45:53.653339 4702 scope.go:117] "RemoveContainer" containerID="8a9a089b6bf2cada9b6b48b1df83f51eb4c91bccb4e100b0e27121ce12527273" Nov 25 10:45:53 crc kubenswrapper[4702]: I1125 10:45:53.653850 4702 scope.go:117] "RemoveContainer" containerID="39543bdaf755be3b942ad694a42d4203bf9d4cfc1c4e666e12098877b51f0a08" Nov 25 10:45:53 crc kubenswrapper[4702]: E1125 10:45:53.654093 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:45:54 crc kubenswrapper[4702]: I1125 10:45:54.417492 
4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:54 crc kubenswrapper[4702]: I1125 10:45:54.417564 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:54 crc kubenswrapper[4702]: I1125 10:45:54.423807 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:57 crc kubenswrapper[4702]: I1125 10:45:57.657151 4702 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:58 crc kubenswrapper[4702]: I1125 10:45:58.680641 4702 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:58 crc kubenswrapper[4702]: I1125 10:45:58.681584 4702 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:58 crc kubenswrapper[4702]: I1125 10:45:58.683983 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Nov 25 10:45:58 crc kubenswrapper[4702]: I1125 10:45:58.686475 4702 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="e6f3dc23-2972-4a5e-a3b6-270a935849ad" Nov 25 10:45:59 crc kubenswrapper[4702]: I1125 10:45:59.686531 4702 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:59 crc kubenswrapper[4702]: I1125 10:45:59.686873 4702 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="06ceeb0a-a429-4102-a32d-1918c25ddc8c" Nov 25 10:45:59 crc kubenswrapper[4702]: I1125 10:45:59.763980 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:46:01 crc kubenswrapper[4702]: I1125 10:46:01.260853 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Nov 25 10:46:01 crc kubenswrapper[4702]: I1125 10:46:01.261133 4702 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Nov 25 10:46:01 crc kubenswrapper[4702]: I1125 10:46:01.261179 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Nov 25 10:46:02 crc kubenswrapper[4702]: I1125 10:46:02.981570 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:46:02 crc kubenswrapper[4702]: I1125 10:46:02.982758 4702 scope.go:117] "RemoveContainer" containerID="39543bdaf755be3b942ad694a42d4203bf9d4cfc1c4e666e12098877b51f0a08" 
Nov 25 10:46:02 crc kubenswrapper[4702]: E1125 10:46:02.983057 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:46:03 crc kubenswrapper[4702]: I1125 10:46:03.415451 4702 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="e6f3dc23-2972-4a5e-a3b6-270a935849ad" Nov 25 10:46:07 crc kubenswrapper[4702]: I1125 10:46:07.315361 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Nov 25 10:46:07 crc kubenswrapper[4702]: I1125 10:46:07.688247 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Nov 25 10:46:07 crc kubenswrapper[4702]: I1125 10:46:07.770435 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Nov 25 10:46:07 crc kubenswrapper[4702]: I1125 10:46:07.788951 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Nov 25 10:46:07 crc kubenswrapper[4702]: I1125 10:46:07.810409 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-76znq" Nov 25 10:46:07 crc kubenswrapper[4702]: I1125 10:46:07.836826 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Nov 25 10:46:08 crc kubenswrapper[4702]: I1125 10:46:08.314692 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Nov 25 10:46:08 crc kubenswrapper[4702]: I1125 10:46:08.652308 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Nov 25 10:46:08 crc kubenswrapper[4702]: I1125 10:46:08.724080 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Nov 25 10:46:08 crc kubenswrapper[4702]: I1125 10:46:08.724595 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Nov 25 10:46:08 crc kubenswrapper[4702]: I1125 10:46:08.934668 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Nov 25 10:46:08 crc kubenswrapper[4702]: I1125 10:46:08.953731 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Nov 25 10:46:09 crc kubenswrapper[4702]: I1125 10:46:09.400697 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Nov 25 10:46:09 crc kubenswrapper[4702]: I1125 10:46:09.474931 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Nov 25 10:46:09 crc kubenswrapper[4702]: I1125 10:46:09.538765 4702 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Nov 25 10:46:09 crc kubenswrapper[4702]: I1125 10:46:09.575593 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Nov 25 10:46:09 crc kubenswrapper[4702]: I1125 10:46:09.852831 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Nov 25 10:46:09 crc kubenswrapper[4702]: I1125 10:46:09.867426 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Nov 25 10:46:09 crc kubenswrapper[4702]: I1125 10:46:09.951128 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.099622 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.102375 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.222253 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.245130 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.304687 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.476659 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.501487 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.509663 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.514392 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.516029 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Nov 25 10:46:10 crc kubenswrapper[4702]: I1125 10:46:10.996843 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.051158 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.065231 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.081045 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.115463 4702 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.134839 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.217462 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.260664 4702 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body= Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.260734 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.264690 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.285891 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.325334 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.381707 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.414539 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.507670 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.573011 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.584623 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.631974 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.746779 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.801121 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.821970 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.867429 4702 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.962420 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Nov 25 10:46:11 crc kubenswrapper[4702]: I1125 10:46:11.990319 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.031352 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.067884 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.185941 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.249223 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.249239 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.361029 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.429590 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.467752 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.497965 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.538499 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.709630 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.770690 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.805535 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.837215 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.847075 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.903340 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.953817 4702 
reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.978833 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.981463 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.982280 4702 scope.go:117] "RemoveContainer" containerID="39543bdaf755be3b942ad694a42d4203bf9d4cfc1c4e666e12098877b51f0a08" Nov 25 10:46:12 crc kubenswrapper[4702]: E1125 10:46:12.982493 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 20s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.991603 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Nov 25 10:46:12 crc kubenswrapper[4702]: I1125 10:46:12.999391 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.166449 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.368136 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.380668 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.399008 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.513747 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.531491 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.589972 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.626408 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.656421 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.711314 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Nov 25 
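NOTE: The pod_workers entry above shows CrashLoopBackOff pacing restarts of the failed "manager" container: kubelet doubles the restart delay for each consecutive crash, starting at 10s and capped at 5m, so "back-off 20s" marks the second crash in a row. A sketch of that progression; the reset kubelet applies after a container runs cleanly for a while is omitted:

    package main

    import (
        "fmt"
        "time"
    )

    // restartDelay returns the CrashLoopBackOff delay before restart n:
    // 10s, 20s, 40s, ... capped at 5 minutes.
    func restartDelay(consecutiveCrashes int) time.Duration {
        const (
            base     = 10 * time.Second
            maxDelay = 5 * time.Minute
        )
        d := base
        for i := 1; i < consecutiveCrashes; i++ {
            d *= 2
            if d >= maxDelay {
                return maxDelay
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 6; n++ {
            fmt.Printf("crash #%d -> back-off %s\n", n, restartDelay(n))
        }
        // crash #2 -> back-off 20s, matching the message above.
    }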
Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.727638 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt"
Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.746581 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs"
Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.776919 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt"
Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.806793 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca"
Nov 25 10:46:13 crc kubenswrapper[4702]: I1125 10:46:13.862306 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.016774 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.023233 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.057959 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.087509 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.110964 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.180285 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.193127 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.199891 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.343049 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.448129 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.450124 4702 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.454657 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.454702 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"]
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.459807 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.480871 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-48592"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.492459 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=17.492437776 podStartE2EDuration="17.492437776s" podCreationTimestamp="2025-11-25 10:45:57 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:46:14.491474078 +0000 UTC m=+871.858069767" watchObservedRunningTime="2025-11-25 10:46:14.492437776 +0000 UTC m=+871.859033465"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.504587 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.526257 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.674850 4702 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.680012 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.690241 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.801031 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.906052 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c"
Nov 25 10:46:14 crc kubenswrapper[4702]: I1125 10:46:14.917598 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources"
Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.061187 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk"
Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.149743 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert"
Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.171746 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config"
Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.233151 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt"
Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.246718 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config"
Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.282846 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt"
Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.307315 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt"
object-"openshift-machine-config-operator"/"node-bootstrapper-token" Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.563016 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.611316 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.658642 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Nov 25 10:46:15 crc kubenswrapper[4702]: I1125 10:46:15.698476 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.038457 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.146691 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.295956 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.364375 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.433055 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.458423 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.459768 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.480356 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.576145 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.577858 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.614725 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.637315 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.672187 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.687014 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.717392 4702 reflector.go:368] Caches 
populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.784029 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.830663 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.918477 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Nov 25 10:46:16 crc kubenswrapper[4702]: I1125 10:46:16.965441 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.060788 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.132113 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.134083 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.141940 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.149894 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.211870 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.288470 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.288491 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.288620 4702 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.288470 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.289025 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.295122 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.480080 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.513993 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.515720 4702 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.558070 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.569093 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.599102 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.607255 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.709430 4702 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.777006 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.879781 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.891231 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.896883 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.907458 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Nov 25 10:46:17 crc kubenswrapper[4702]: I1125 10:46:17.993615 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.026352 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.036242 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.069852 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.142372 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.142446 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.169996 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.209231 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.250720 4702 reflector.go:368] Caches 
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.250720 4702 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.302125 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.380257 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.399877 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.461964 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.532215 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.546496 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.568409 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.618254 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.698169 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.710032 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.712428 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.786993 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.798806 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.843021 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.917088 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.946372 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls"
Nov 25 10:46:18 crc kubenswrapper[4702]: I1125 10:46:18.997792 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.090125 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.138343 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.139651 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.140336 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.155418 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.192693 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.241723 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.266120 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.273636 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.391866 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.593012 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.675114 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.707670 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.748879 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.790249 4702 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.803700 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.808638 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.815416 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.877148 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.937877 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt"
Nov 25 10:46:19 crc kubenswrapper[4702]: I1125 10:46:19.992132 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.026930 4702 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"]
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.027149 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7" gracePeriod=5
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.081996 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.095424 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.146168 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.229875 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.262407 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.319222 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.324143 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.364236 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.428355 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.432253 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.515988 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.583228 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.751177 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.757424 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit"
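NOTE: "SyncLoop REMOVE" with source="file" (as for kube-apiserver-startup-monitor-crc above) means the pod is a static pod defined by a manifest on disk rather than an API object: deleting the manifest is what removes the pod, and kubelet then kills its container with the short gracePeriod=5 seen here. A polling sketch of such a file-based config source; the directory path is the conventional default (an assumption for this node), and the real kubelet also supports inotify-style watching:

    package main

    import (
        "fmt"
        "os"
        "time"
    )

    func main() {
        const manifestDir = "/etc/kubernetes/manifests" // typical static-pod dir; assumption
        known := map[string]bool{}
        for {
            seen := map[string]bool{}
            entries, _ := os.ReadDir(manifestDir)
            for _, e := range entries {
                seen[e.Name()] = true
                if !known[e.Name()] {
                    fmt.Println("SyncLoop ADD source=file:", e.Name())
                }
            }
            for name := range known {
                if !seen[name] {
                    fmt.Println("SyncLoop REMOVE source=file:", name)
                }
            }
            known = seen
            time.Sleep(3 * time.Second)
        }
    }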
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.768053 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.877832 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert"
Nov 25 10:46:20 crc kubenswrapper[4702]: I1125 10:46:20.958863 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.066762 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.112120 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.151497 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.213932 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.215462 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.251260 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.260858 4702 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/kube-controller-manager namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused" start-of-body=
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.260944 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" probeResult="failure" output="Get \"https://192.168.126.11:10257/healthz\": dial tcp 192.168.126.11:10257: connect: connection refused"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.260997 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.261536 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="kube-controller-manager" containerStatusID={"Type":"cri-o","ID":"f084fa5d72f0dc33cce288046f2bbfc26008f4583480c4bb92b283a84599e0e7"} pod="openshift-kube-controller-manager/kube-controller-manager-crc" containerMessage="Container kube-controller-manager failed startup probe, will be restarted"
Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.261637 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="kube-controller-manager" containerID="cri-o://f084fa5d72f0dc33cce288046f2bbfc26008f4583480c4bb92b283a84599e0e7" gracePeriod=30
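NOTE: The sequence above is the startup-probe contract end to end: the probe fails, kubelet marks the container "failed startup probe, will be restarted", and kills it with gracePeriod=30. A graceful kill sends SIGTERM first and SIGKILL only once the grace period expires; a hard kill surfaces as exitCode=137 (128+9), as the startup-monitor container shows further down. A process-level sketch of the same contract (the real path goes through the CRI to CRI-O, not direct signals):

    package main

    import (
        "fmt"
        "os/exec"
        "syscall"
        "time"
    )

    func stopWithGrace(cmd *exec.Cmd, grace time.Duration) {
        done := make(chan error, 1)
        go func() { done <- cmd.Wait() }()
        cmd.Process.Signal(syscall.SIGTERM) // polite request first
        select {
        case <-done:
            fmt.Println("exited within grace period")
        case <-time.After(grace):
            cmd.Process.Kill() // SIGKILL; a container killed this way reports exit code 137
            <-done
        }
    }

    func main() {
        cmd := exec.Command("sleep", "60")
        if err := cmd.Start(); err != nil {
            panic(err)
        }
        stopWithGrace(cmd, 5*time.Second)
    }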
object-"openshift-authentication"/"kube-root-ca.crt" Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.353801 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.645698 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.728384 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.809457 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.899253 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Nov 25 10:46:21 crc kubenswrapper[4702]: I1125 10:46:21.912210 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Nov 25 10:46:22 crc kubenswrapper[4702]: I1125 10:46:22.031741 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Nov 25 10:46:22 crc kubenswrapper[4702]: I1125 10:46:22.066990 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Nov 25 10:46:22 crc kubenswrapper[4702]: I1125 10:46:22.114508 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Nov 25 10:46:22 crc kubenswrapper[4702]: I1125 10:46:22.299507 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Nov 25 10:46:22 crc kubenswrapper[4702]: I1125 10:46:22.392593 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Nov 25 10:46:22 crc kubenswrapper[4702]: I1125 10:46:22.761738 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Nov 25 10:46:22 crc kubenswrapper[4702]: I1125 10:46:22.794705 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Nov 25 10:46:22 crc kubenswrapper[4702]: I1125 10:46:22.873200 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Nov 25 10:46:23 crc kubenswrapper[4702]: I1125 10:46:23.253000 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Nov 25 10:46:23 crc kubenswrapper[4702]: I1125 10:46:23.560942 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Nov 25 10:46:23 crc kubenswrapper[4702]: I1125 10:46:23.678091 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Nov 25 10:46:23 crc kubenswrapper[4702]: I1125 10:46:23.698047 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Nov 25 10:46:23 crc kubenswrapper[4702]: I1125 10:46:23.714338 4702 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Nov 25 10:46:23 crc kubenswrapper[4702]: I1125 10:46:23.715418 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Nov 25 10:46:23 crc kubenswrapper[4702]: I1125 10:46:23.832153 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Nov 25 10:46:24 crc kubenswrapper[4702]: I1125 10:46:24.402665 4702 scope.go:117] "RemoveContainer" containerID="39543bdaf755be3b942ad694a42d4203bf9d4cfc1c4e666e12098877b51f0a08" Nov 25 10:46:24 crc kubenswrapper[4702]: I1125 10:46:24.821727 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerStarted","Data":"af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28"} Nov 25 10:46:24 crc kubenswrapper[4702]: I1125 10:46:24.821938 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:46:24 crc kubenswrapper[4702]: I1125 10:46:24.843954 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podStartSLOduration=48.719928525 podStartE2EDuration="52.843928835s" podCreationTimestamp="2025-11-25 10:45:32 +0000 UTC" firstStartedPulling="2025-11-25 10:45:33.431812567 +0000 UTC m=+830.798408256" lastFinishedPulling="2025-11-25 10:45:37.555812877 +0000 UTC m=+834.922408566" observedRunningTime="2025-11-25 10:46:24.839209338 +0000 UTC m=+882.205805047" watchObservedRunningTime="2025-11-25 10:46:24.843928835 +0000 UTC m=+882.210524524" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.597091 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.597863 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.696228 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.696616 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.696768 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.696375 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.696667 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.697060 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.697142 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.697248 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.697310 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.697649 4702 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.697747 4702 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.697825 4702 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.697892 4702 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.706288 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.799819 4702 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.830645 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.830698 4702 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7" exitCode=137 Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.830766 4702 util.go:48] "No ready sandbox for pod can be found. 
Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.830766 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"
Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.830832 4702 scope.go:117] "RemoveContainer" containerID="f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7"
Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.848123 4702 scope.go:117] "RemoveContainer" containerID="f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7"
Nov 25 10:46:25 crc kubenswrapper[4702]: E1125 10:46:25.848626 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7\": container with ID starting with f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7 not found: ID does not exist" containerID="f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7"
Nov 25 10:46:25 crc kubenswrapper[4702]: I1125 10:46:25.848682 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7"} err="failed to get container status \"f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7\": rpc error: code = NotFound desc = could not find container \"f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7\": container with ID starting with f8e1fc58e170141b1f148c768685261e282cdd15619da5a2950ddccf529f82a7 not found: ID does not exist"
Nov 25 10:46:27 crc kubenswrapper[4702]: I1125 10:46:27.409103 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.262821 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-scblb"]
Nov 25 10:46:31 crc kubenswrapper[4702]: E1125 10:46:31.263184 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.263201 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 25 10:46:31 crc kubenswrapper[4702]: E1125 10:46:31.263220 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" containerName="installer"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.263228 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" containerName="installer"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.263354 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.263367 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="4b8a5a80-f55f-4479-be86-0c8dfaaaa4cc" containerName="installer"
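NOTE: The NotFound errors above are benign: the first "RemoveContainer" succeeded, and the follow-up ContainerStatus query simply finds the container already gone, so cleanup treats the delete as complete. With gRPC-based runtimes such as CRI-O that condition is conventionally detected via the status code, roughly as below (the real kubelet check lives in its CRI client, not this helper):

    package main

    import (
        "fmt"

        "google.golang.org/grpc/codes"
        "google.golang.org/grpc/status"
    )

    // alreadyGone reports whether an RPC failed only because the container
    // no longer exists, so cleanup can treat the delete as done.
    func alreadyGone(err error) bool {
        return status.Code(err) == codes.NotFound
    }

    func main() {
        err := status.Error(codes.NotFound, "could not find container")
        fmt.Println(alreadyGone(err)) // true
    }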
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.264276 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.315503 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scblb"]
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.382578 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vnxsc\" (UniqueName: \"kubernetes.io/projected/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-kube-api-access-vnxsc\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.382642 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-utilities\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.382673 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-catalog-content\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.483547 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vnxsc\" (UniqueName: \"kubernetes.io/projected/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-kube-api-access-vnxsc\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.483837 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-utilities\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.483960 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-catalog-content\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.484364 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-utilities\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.484372 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-catalog-content\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.502128 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vnxsc\" (UniqueName: \"kubernetes.io/projected/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-kube-api-access-vnxsc\") pod \"redhat-marketplace-scblb\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.590852 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scblb"
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.782703 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-scblb"]
Nov 25 10:46:31 crc kubenswrapper[4702]: W1125 10:46:31.787712 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod55dc663c_8dca_4d62_a20d_ffb82fdd5ee9.slice/crio-9926b7bd4e16787fea4c2e6c0a16c1743d0459e3d514a9b5d85fc234010af70f WatchSource:0}: Error finding container 9926b7bd4e16787fea4c2e6c0a16c1743d0459e3d514a9b5d85fc234010af70f: Status 404 returned error can't find the container with id 9926b7bd4e16787fea4c2e6c0a16c1743d0459e3d514a9b5d85fc234010af70f
Nov 25 10:46:31 crc kubenswrapper[4702]: I1125 10:46:31.868838 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scblb" event={"ID":"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9","Type":"ContainerStarted","Data":"9926b7bd4e16787fea4c2e6c0a16c1743d0459e3d514a9b5d85fc234010af70f"}
Nov 25 10:46:32 crc kubenswrapper[4702]: I1125 10:46:32.877264 4702 generic.go:334] "Generic (PLEG): container finished" podID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerID="64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98" exitCode=0
Nov 25 10:46:32 crc kubenswrapper[4702]: I1125 10:46:32.877316 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scblb" event={"ID":"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9","Type":"ContainerDied","Data":"64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98"}
Nov 25 10:46:32 crc kubenswrapper[4702]: I1125 10:46:32.984633 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"
Nov 25 10:46:33 crc kubenswrapper[4702]: I1125 10:46:33.885885 4702 generic.go:334] "Generic (PLEG): container finished" podID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerID="719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f" exitCode=0
Nov 25 10:46:33 crc kubenswrapper[4702]: I1125 10:46:33.885950 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scblb" event={"ID":"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9","Type":"ContainerDied","Data":"719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f"}
Nov 25 10:46:34 crc kubenswrapper[4702]: I1125 10:46:34.895169 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scblb" event={"ID":"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9","Type":"ContainerStarted","Data":"1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7"}
10:46:32.878881635 +0000 UTC m=+890.245477324" lastFinishedPulling="2025-11-25 10:46:34.347890623 +0000 UTC m=+891.714486312" observedRunningTime="2025-11-25 10:46:34.911038843 +0000 UTC m=+892.277634542" watchObservedRunningTime="2025-11-25 10:46:34.914161924 +0000 UTC m=+892.280757613" Nov 25 10:46:39 crc kubenswrapper[4702]: I1125 10:46:39.784975 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Nov 25 10:46:39 crc kubenswrapper[4702]: I1125 10:46:39.897614 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-x6blk"] Nov 25 10:46:39 crc kubenswrapper[4702]: I1125 10:46:39.899255 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:39 crc kubenswrapper[4702]: I1125 10:46:39.910597 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6blk"] Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.090559 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9tzdv\" (UniqueName: \"kubernetes.io/projected/c78fcba2-ffee-4ad3-bccd-085a90c81236-kube-api-access-9tzdv\") pod \"certified-operators-x6blk\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.090692 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-catalog-content\") pod \"certified-operators-x6blk\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.090733 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-utilities\") pod \"certified-operators-x6blk\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.137276 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.192165 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9tzdv\" (UniqueName: \"kubernetes.io/projected/c78fcba2-ffee-4ad3-bccd-085a90c81236-kube-api-access-9tzdv\") pod \"certified-operators-x6blk\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.192269 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-catalog-content\") pod \"certified-operators-x6blk\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.192300 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-utilities\") pod \"certified-operators-x6blk\" (UID: 
\"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.192933 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-utilities\") pod \"certified-operators-x6blk\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.193323 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-catalog-content\") pod \"certified-operators-x6blk\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.213429 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9tzdv\" (UniqueName: \"kubernetes.io/projected/c78fcba2-ffee-4ad3-bccd-085a90c81236-kube-api-access-9tzdv\") pod \"certified-operators-x6blk\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.219248 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.422798 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-x6blk"] Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.937822 4702 generic.go:334] "Generic (PLEG): container finished" podID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerID="98655131736db000215edc624e1852c8761f2289b8d63e356f869300f55ddf02" exitCode=0 Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.937865 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6blk" event={"ID":"c78fcba2-ffee-4ad3-bccd-085a90c81236","Type":"ContainerDied","Data":"98655131736db000215edc624e1852c8761f2289b8d63e356f869300f55ddf02"} Nov 25 10:46:40 crc kubenswrapper[4702]: I1125 10:46:40.937891 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6blk" event={"ID":"c78fcba2-ffee-4ad3-bccd-085a90c81236","Type":"ContainerStarted","Data":"85c26e137c093486705de124609b0400b0b6f7f89021a6be1330fbd7616a1bc2"} Nov 25 10:46:41 crc kubenswrapper[4702]: I1125 10:46:41.591729 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-scblb" Nov 25 10:46:41 crc kubenswrapper[4702]: I1125 10:46:41.592516 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-scblb" Nov 25 10:46:41 crc kubenswrapper[4702]: I1125 10:46:41.636091 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-scblb" Nov 25 10:46:41 crc kubenswrapper[4702]: I1125 10:46:41.984024 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-scblb" Nov 25 10:46:42 crc kubenswrapper[4702]: I1125 10:46:42.949110 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6blk" 
event={"ID":"c78fcba2-ffee-4ad3-bccd-085a90c81236","Type":"ContainerStarted","Data":"af614a0c2c218fe778a04ff296b9b57066d4922e2c12f2bdb0cc8f213f84809d"} Nov 25 10:46:43 crc kubenswrapper[4702]: I1125 10:46:43.956720 4702 generic.go:334] "Generic (PLEG): container finished" podID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerID="af614a0c2c218fe778a04ff296b9b57066d4922e2c12f2bdb0cc8f213f84809d" exitCode=0 Nov 25 10:46:43 crc kubenswrapper[4702]: I1125 10:46:43.956937 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6blk" event={"ID":"c78fcba2-ffee-4ad3-bccd-085a90c81236","Type":"ContainerDied","Data":"af614a0c2c218fe778a04ff296b9b57066d4922e2c12f2bdb0cc8f213f84809d"} Nov 25 10:46:44 crc kubenswrapper[4702]: I1125 10:46:44.965411 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6blk" event={"ID":"c78fcba2-ffee-4ad3-bccd-085a90c81236","Type":"ContainerStarted","Data":"b5e5f66efdaedeb410a56f03c27c69f594fc16c7b4ab2ece72166007fea6b432"} Nov 25 10:46:44 crc kubenswrapper[4702]: I1125 10:46:44.985557 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-x6blk" podStartSLOduration=2.313165943 podStartE2EDuration="5.985541892s" podCreationTimestamp="2025-11-25 10:46:39 +0000 UTC" firstStartedPulling="2025-11-25 10:46:40.940013493 +0000 UTC m=+898.306609182" lastFinishedPulling="2025-11-25 10:46:44.612389442 +0000 UTC m=+901.978985131" observedRunningTime="2025-11-25 10:46:44.983110202 +0000 UTC m=+902.349705911" watchObservedRunningTime="2025-11-25 10:46:44.985541892 +0000 UTC m=+902.352137581" Nov 25 10:46:49 crc kubenswrapper[4702]: I1125 10:46:49.651499 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Nov 25 10:46:49 crc kubenswrapper[4702]: I1125 10:46:49.953807 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-wg9lr"] Nov 25 10:46:49 crc kubenswrapper[4702]: I1125 10:46:49.955268 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:49 crc kubenswrapper[4702]: I1125 10:46:49.961085 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wg9lr"] Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.034785 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mrpbx\" (UniqueName: \"kubernetes.io/projected/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-kube-api-access-mrpbx\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.035161 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-catalog-content\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.035180 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-utilities\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.135925 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mrpbx\" (UniqueName: \"kubernetes.io/projected/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-kube-api-access-mrpbx\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.135991 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-utilities\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.136007 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-catalog-content\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.136470 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-catalog-content\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.136799 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-utilities\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.144272 4702 kubelet.go:2421] "SyncLoop 
ADD" source="api" pods=["openshift-marketplace/community-operators-46fx5"] Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.145599 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.160032 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-46fx5"] Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.162510 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mrpbx\" (UniqueName: \"kubernetes.io/projected/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-kube-api-access-mrpbx\") pod \"certified-operators-wg9lr\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.219606 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.219911 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.237522 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8n59m\" (UniqueName: \"kubernetes.io/projected/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-kube-api-access-8n59m\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.237617 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-utilities\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.237686 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-catalog-content\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.261062 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.271728 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.338552 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8n59m\" (UniqueName: \"kubernetes.io/projected/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-kube-api-access-8n59m\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.338628 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-utilities\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.338671 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-catalog-content\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.339372 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-utilities\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.339559 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-catalog-content\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.358724 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8n59m\" (UniqueName: \"kubernetes.io/projected/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-kube-api-access-8n59m\") pod \"community-operators-46fx5\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.462007 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.644793 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-46fx5"] Nov 25 10:46:50 crc kubenswrapper[4702]: I1125 10:46:50.681875 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-wg9lr"] Nov 25 10:46:50 crc kubenswrapper[4702]: W1125 10:46:50.685069 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0738bd9_a74b_4aaa_a885_eae81ea2dc35.slice/crio-c94948f12b2d345b12227c4cb12d8bb51dbb5496a0dfb9f1811db1a9deb87700 WatchSource:0}: Error finding container c94948f12b2d345b12227c4cb12d8bb51dbb5496a0dfb9f1811db1a9deb87700: Status 404 returned error can't find the container with id c94948f12b2d345b12227c4cb12d8bb51dbb5496a0dfb9f1811db1a9deb87700 Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:50.999929 4702 generic.go:334] "Generic (PLEG): container finished" podID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerID="490617530ab7239bc926f99540292a6775c79a2aecaa7f04e8338a38d73a9890" exitCode=0 Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.000245 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46fx5" event={"ID":"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af","Type":"ContainerDied","Data":"490617530ab7239bc926f99540292a6775c79a2aecaa7f04e8338a38d73a9890"} Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.000273 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46fx5" event={"ID":"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af","Type":"ContainerStarted","Data":"5dc9039e236ca878f259055b1698fa5677703bc1911b8e3cf83fbb670912d99a"} Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.003577 4702 generic.go:334] "Generic (PLEG): container finished" podID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerID="c95cf99c9859a0a90ad2d51c33e30fa78b188a36585ca8ee27e23372a7563d1b" exitCode=0 Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.004028 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg9lr" event={"ID":"c0738bd9-a74b-4aaa-a885-eae81ea2dc35","Type":"ContainerDied","Data":"c95cf99c9859a0a90ad2d51c33e30fa78b188a36585ca8ee27e23372a7563d1b"} Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.004081 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg9lr" event={"ID":"c0738bd9-a74b-4aaa-a885-eae81ea2dc35","Type":"ContainerStarted","Data":"c94948f12b2d345b12227c4cb12d8bb51dbb5496a0dfb9f1811db1a9deb87700"} Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.049172 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.745206 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-gqzjk"] Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.746455 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.754823 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss7gc\" (UniqueName: \"kubernetes.io/projected/215f2731-6dc6-465a-a076-7a08feb8e5b6-kube-api-access-ss7gc\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.754873 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-utilities\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.754920 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-catalog-content\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.757354 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gqzjk"] Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.856388 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss7gc\" (UniqueName: \"kubernetes.io/projected/215f2731-6dc6-465a-a076-7a08feb8e5b6-kube-api-access-ss7gc\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.856457 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-utilities\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.856496 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-catalog-content\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.857072 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-catalog-content\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.857108 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-utilities\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:51 crc kubenswrapper[4702]: I1125 10:46:51.878622 4702 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-ss7gc\" (UniqueName: \"kubernetes.io/projected/215f2731-6dc6-465a-a076-7a08feb8e5b6-kube-api-access-ss7gc\") pod \"redhat-marketplace-gqzjk\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.012178 4702 generic.go:334] "Generic (PLEG): container finished" podID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerID="eed2f54a4bd92355f520f32986da18b75c3745175035e0a9db410bd3138bb2a3" exitCode=0 Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.012267 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46fx5" event={"ID":"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af","Type":"ContainerDied","Data":"eed2f54a4bd92355f520f32986da18b75c3745175035e0a9db410bd3138bb2a3"} Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.014737 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.017668 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.017750 4702 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="f084fa5d72f0dc33cce288046f2bbfc26008f4583480c4bb92b283a84599e0e7" exitCode=137 Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.017843 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"f084fa5d72f0dc33cce288046f2bbfc26008f4583480c4bb92b283a84599e0e7"} Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.017891 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"1cb9a2c959ec92114b16bb5e24d6af8fdfbf0e0fc02ff5ec39a458b1c0261a4f"} Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.017933 4702 scope.go:117] "RemoveContainer" containerID="f4698783efac5260c46eec74b2721c3c398e2ef96b079259c74486ec597baeb3" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.064630 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.246519 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-gqzjk"] Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.744111 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-knkf9"] Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.745419 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.754628 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-knkf9"] Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.776858 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-utilities\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.776895 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-catalog-content\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.776954 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fk6ww\" (UniqueName: \"kubernetes.io/projected/e9dcc033-976b-440e-88ca-0c3b72212057-kube-api-access-fk6ww\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.878288 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-utilities\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.878347 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-catalog-content\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.878389 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fk6ww\" (UniqueName: \"kubernetes.io/projected/e9dcc033-976b-440e-88ca-0c3b72212057-kube-api-access-fk6ww\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.879054 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-utilities\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.879093 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-catalog-content\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:52 crc kubenswrapper[4702]: I1125 10:46:52.901169 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-fk6ww\" (UniqueName: \"kubernetes.io/projected/e9dcc033-976b-440e-88ca-0c3b72212057-kube-api-access-fk6ww\") pod \"redhat-operators-knkf9\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.025126 4702 generic.go:334] "Generic (PLEG): container finished" podID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerID="eb7e9f68f7df7e15ec0fc3ab662c2a27e280714158a6e351bb3ae16dcbed1d1a" exitCode=0 Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.025265 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gqzjk" event={"ID":"215f2731-6dc6-465a-a076-7a08feb8e5b6","Type":"ContainerDied","Data":"eb7e9f68f7df7e15ec0fc3ab662c2a27e280714158a6e351bb3ae16dcbed1d1a"} Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.025333 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gqzjk" event={"ID":"215f2731-6dc6-465a-a076-7a08feb8e5b6","Type":"ContainerStarted","Data":"4db64d0388c36c243d9a80e82dbeb7ec0cc99cb3c9dbd2a619370d5f89a02695"} Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.027921 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46fx5" event={"ID":"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af","Type":"ContainerStarted","Data":"28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b"} Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.030482 4702 generic.go:334] "Generic (PLEG): container finished" podID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerID="53b110d0951b8e4d4e6d96c47f37d083ecfbb4be7c20de9a23d143d5aaada4f1" exitCode=0 Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.030562 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg9lr" event={"ID":"c0738bd9-a74b-4aaa-a885-eae81ea2dc35","Type":"ContainerDied","Data":"53b110d0951b8e4d4e6d96c47f37d083ecfbb4be7c20de9a23d143d5aaada4f1"} Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.033630 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/1.log" Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.064243 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-46fx5" podStartSLOduration=1.535017914 podStartE2EDuration="3.064221125s" podCreationTimestamp="2025-11-25 10:46:50 +0000 UTC" firstStartedPulling="2025-11-25 10:46:51.00220391 +0000 UTC m=+908.368799599" lastFinishedPulling="2025-11-25 10:46:52.531407121 +0000 UTC m=+909.898002810" observedRunningTime="2025-11-25 10:46:53.061748943 +0000 UTC m=+910.428344642" watchObservedRunningTime="2025-11-25 10:46:53.064221125 +0000 UTC m=+910.430816834" Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.093655 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:46:53 crc kubenswrapper[4702]: I1125 10:46:53.283404 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-knkf9"] Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.044425 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gqzjk" event={"ID":"215f2731-6dc6-465a-a076-7a08feb8e5b6","Type":"ContainerStarted","Data":"6f9fd10c0b4adf0cf04ecdad8312bbf5cab2e31f03b353d339872eaa787d3a8b"} Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.048019 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg9lr" event={"ID":"c0738bd9-a74b-4aaa-a885-eae81ea2dc35","Type":"ContainerStarted","Data":"b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089"} Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.050734 4702 generic.go:334] "Generic (PLEG): container finished" podID="e9dcc033-976b-440e-88ca-0c3b72212057" containerID="8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd" exitCode=0 Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.050831 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knkf9" event={"ID":"e9dcc033-976b-440e-88ca-0c3b72212057","Type":"ContainerDied","Data":"8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd"} Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.050965 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knkf9" event={"ID":"e9dcc033-976b-440e-88ca-0c3b72212057","Type":"ContainerStarted","Data":"e90fc578c85e73520f220c9fefe71a8a2a3d1325a9aa20590274246b1f247369"} Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.105853 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-wg9lr" podStartSLOduration=2.585163977 podStartE2EDuration="5.1058262s" podCreationTimestamp="2025-11-25 10:46:49 +0000 UTC" firstStartedPulling="2025-11-25 10:46:51.005925049 +0000 UTC m=+908.372520738" lastFinishedPulling="2025-11-25 10:46:53.526587272 +0000 UTC m=+910.893182961" observedRunningTime="2025-11-25 10:46:54.103569064 +0000 UTC m=+911.470164753" watchObservedRunningTime="2025-11-25 10:46:54.1058262 +0000 UTC m=+911.472421879" Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.950827 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zcs6g"] Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.952600 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:54 crc kubenswrapper[4702]: I1125 10:46:54.963457 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zcs6g"] Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.004880 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-utilities\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.004966 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-catalog-content\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.005014 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jf69c\" (UniqueName: \"kubernetes.io/projected/de760b35-119f-4975-8eeb-76e8f9adb9f1-kube-api-access-jf69c\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.106665 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-utilities\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.106743 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-catalog-content\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.106808 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jf69c\" (UniqueName: \"kubernetes.io/projected/de760b35-119f-4975-8eeb-76e8f9adb9f1-kube-api-access-jf69c\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.107687 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-utilities\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.107989 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-catalog-content\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.126495 4702 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-jf69c\" (UniqueName: \"kubernetes.io/projected/de760b35-119f-4975-8eeb-76e8f9adb9f1-kube-api-access-jf69c\") pod \"certified-operators-zcs6g\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.274309 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.348946 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-jxsrw"] Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.350489 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.360748 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxsrw"] Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.413407 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-catalog-content\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.413470 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-utilities\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.413496 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-klcpn\" (UniqueName: \"kubernetes.io/projected/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-kube-api-access-klcpn\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.515307 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-catalog-content\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.515664 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-utilities\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.515695 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-klcpn\" (UniqueName: \"kubernetes.io/projected/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-kube-api-access-klcpn\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.515993 4702 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-catalog-content\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.516342 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-utilities\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.538138 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-klcpn\" (UniqueName: \"kubernetes.io/projected/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-kube-api-access-klcpn\") pod \"redhat-marketplace-jxsrw\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") " pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.583656 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zcs6g"] Nov 25 10:46:55 crc kubenswrapper[4702]: W1125 10:46:55.589012 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podde760b35_119f_4975_8eeb_76e8f9adb9f1.slice/crio-03843430be3faae62305e427e7079166c795eff6ba79717eecb695066dddeb9c WatchSource:0}: Error finding container 03843430be3faae62305e427e7079166c795eff6ba79717eecb695066dddeb9c: Status 404 returned error can't find the container with id 03843430be3faae62305e427e7079166c795eff6ba79717eecb695066dddeb9c Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.665795 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:46:55 crc kubenswrapper[4702]: I1125 10:46:55.906252 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxsrw"] Nov 25 10:46:55 crc kubenswrapper[4702]: W1125 10:46:55.912696 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda2a179ea_7517_4ae8_adb3_15ddc3b759fa.slice/crio-3c76bcde9c4a20d3ecf2f5869a68323ef31fe53655a5207579db184a9d31df7f WatchSource:0}: Error finding container 3c76bcde9c4a20d3ecf2f5869a68323ef31fe53655a5207579db184a9d31df7f: Status 404 returned error can't find the container with id 3c76bcde9c4a20d3ecf2f5869a68323ef31fe53655a5207579db184a9d31df7f Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.066250 4702 generic.go:334] "Generic (PLEG): container finished" podID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerID="abba02ee45d72287f86337761e2f571c705f0c164992cb25e45c89119df2b4b4" exitCode=0 Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.066348 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zcs6g" event={"ID":"de760b35-119f-4975-8eeb-76e8f9adb9f1","Type":"ContainerDied","Data":"abba02ee45d72287f86337761e2f571c705f0c164992cb25e45c89119df2b4b4"} Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.066395 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zcs6g" event={"ID":"de760b35-119f-4975-8eeb-76e8f9adb9f1","Type":"ContainerStarted","Data":"03843430be3faae62305e427e7079166c795eff6ba79717eecb695066dddeb9c"} Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.067997 4702 generic.go:334] "Generic (PLEG): container finished" podID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerID="8799a28dbc318bae877852a246807291e4169ad4407afbaedc9dcf3f546eda27" exitCode=0 Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.068027 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxsrw" event={"ID":"a2a179ea-7517-4ae8-adb3-15ddc3b759fa","Type":"ContainerDied","Data":"8799a28dbc318bae877852a246807291e4169ad4407afbaedc9dcf3f546eda27"} Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.068070 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxsrw" event={"ID":"a2a179ea-7517-4ae8-adb3-15ddc3b759fa","Type":"ContainerStarted","Data":"3c76bcde9c4a20d3ecf2f5869a68323ef31fe53655a5207579db184a9d31df7f"} Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.070430 4702 generic.go:334] "Generic (PLEG): container finished" podID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerID="6f9fd10c0b4adf0cf04ecdad8312bbf5cab2e31f03b353d339872eaa787d3a8b" exitCode=0 Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.070514 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gqzjk" event={"ID":"215f2731-6dc6-465a-a076-7a08feb8e5b6","Type":"ContainerDied","Data":"6f9fd10c0b4adf0cf04ecdad8312bbf5cab2e31f03b353d339872eaa787d3a8b"} Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.073518 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knkf9" event={"ID":"e9dcc033-976b-440e-88ca-0c3b72212057","Type":"ContainerStarted","Data":"b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3"} Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.945383 4702 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-h8j5p"]
Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.946670 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:56 crc kubenswrapper[4702]: I1125 10:46:56.976410 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h8j5p"]
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.038922 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-utilities\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.038971 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-catalog-content\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.039122 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zfdjw\" (UniqueName: \"kubernetes.io/projected/69f8b3df-cceb-485b-b985-7bdad0788aef-kube-api-access-zfdjw\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.096130 4702 generic.go:334] "Generic (PLEG): container finished" podID="e9dcc033-976b-440e-88ca-0c3b72212057" containerID="b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3" exitCode=0
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.096281 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knkf9" event={"ID":"e9dcc033-976b-440e-88ca-0c3b72212057","Type":"ContainerDied","Data":"b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3"}
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.103373 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zcs6g" event={"ID":"de760b35-119f-4975-8eeb-76e8f9adb9f1","Type":"ContainerStarted","Data":"fc910157359e7b3e7050f7a6f381c53f4d647a501253657d603dd0ba134318bb"}
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.118000 4702 generic.go:334] "Generic (PLEG): container finished" podID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerID="2211c78e4f70b91f2424a7cb45761db3e2b0827b0e41dbbe17450ae991589392" exitCode=0
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.118105 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxsrw" event={"ID":"a2a179ea-7517-4ae8-adb3-15ddc3b759fa","Type":"ContainerDied","Data":"2211c78e4f70b91f2424a7cb45761db3e2b0827b0e41dbbe17450ae991589392"}
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.124155 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gqzjk" event={"ID":"215f2731-6dc6-465a-a076-7a08feb8e5b6","Type":"ContainerStarted","Data":"1f23c6ae047c3758b97a19b0c7d6b75e736fd177f6640f03a6b833c0d09a06af"}
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.140713 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-catalog-content\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.140850 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zfdjw\" (UniqueName: \"kubernetes.io/projected/69f8b3df-cceb-485b-b985-7bdad0788aef-kube-api-access-zfdjw\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.140970 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-utilities\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.141513 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-utilities\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.142414 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-catalog-content\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.146419 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-gqzjk" podStartSLOduration=2.638700842 podStartE2EDuration="6.146402591s" podCreationTimestamp="2025-11-25 10:46:51 +0000 UTC" firstStartedPulling="2025-11-25 10:46:53.026737741 +0000 UTC m=+910.393333430" lastFinishedPulling="2025-11-25 10:46:56.53443949 +0000 UTC m=+913.901035179" observedRunningTime="2025-11-25 10:46:57.146300138 +0000 UTC m=+914.512895847" watchObservedRunningTime="2025-11-25 10:46:57.146402591 +0000 UTC m=+914.512998280"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.169703 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zfdjw\" (UniqueName: \"kubernetes.io/projected/69f8b3df-cceb-485b-b985-7bdad0788aef-kube-api-access-zfdjw\") pod \"certified-operators-h8j5p\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.262130 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h8j5p"
Nov 25 10:46:57 crc kubenswrapper[4702]: I1125 10:46:57.491456 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-h8j5p"]
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.132714 4702 generic.go:334] "Generic (PLEG): container finished" podID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerID="1f53ec9b9317ce69c14109e5831e171a7be12e554ea72f44a87ae7f793a0b33d" exitCode=0
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.132845 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h8j5p" event={"ID":"69f8b3df-cceb-485b-b985-7bdad0788aef","Type":"ContainerDied","Data":"1f53ec9b9317ce69c14109e5831e171a7be12e554ea72f44a87ae7f793a0b33d"}
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.134010 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h8j5p" event={"ID":"69f8b3df-cceb-485b-b985-7bdad0788aef","Type":"ContainerStarted","Data":"8f89130a0f49d3d5aae94b95273388eb46ddd4537c19cc7d3a043d807853d302"}
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.136576 4702 generic.go:334] "Generic (PLEG): container finished" podID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerID="fc910157359e7b3e7050f7a6f381c53f4d647a501253657d603dd0ba134318bb" exitCode=0
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.136644 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zcs6g" event={"ID":"de760b35-119f-4975-8eeb-76e8f9adb9f1","Type":"ContainerDied","Data":"fc910157359e7b3e7050f7a6f381c53f4d647a501253657d603dd0ba134318bb"}
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.543183 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5hzg9"]
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.544872 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.554421 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5hzg9"]
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.559969 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-catalog-content\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.560013 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-utilities\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.560038 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ptsx9\" (UniqueName: \"kubernetes.io/projected/79c9c319-b87f-4dae-9744-03ef948bf068-kube-api-access-ptsx9\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.660788 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-catalog-content\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.660853 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-utilities\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.660896 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ptsx9\" (UniqueName: \"kubernetes.io/projected/79c9c319-b87f-4dae-9744-03ef948bf068-kube-api-access-ptsx9\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.661411 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-utilities\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.661420 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-catalog-content\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.685170 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ptsx9\" (UniqueName: \"kubernetes.io/projected/79c9c319-b87f-4dae-9744-03ef948bf068-kube-api-access-ptsx9\") pod \"redhat-operators-5hzg9\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:58 crc kubenswrapper[4702]: I1125 10:46:58.860626 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5hzg9"
Nov 25 10:46:59 crc kubenswrapper[4702]: I1125 10:46:59.630719 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5hzg9"]
Nov 25 10:46:59 crc kubenswrapper[4702]: I1125 10:46:59.763187 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 25 10:47:00 crc kubenswrapper[4702]: I1125 10:47:00.152187 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knkf9" event={"ID":"e9dcc033-976b-440e-88ca-0c3b72212057","Type":"ContainerStarted","Data":"0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df"}
Nov 25 10:47:00 crc kubenswrapper[4702]: I1125 10:47:00.153242 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hzg9" event={"ID":"79c9c319-b87f-4dae-9744-03ef948bf068","Type":"ContainerStarted","Data":"b7e84d29ad605cc26f7c6c5ddc5468a7ab33f47f6c49dd8c320c56dcc0501601"}
Nov 25 10:47:00 crc kubenswrapper[4702]: I1125 10:47:00.272153 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-wg9lr"
Nov 25 10:47:00 crc kubenswrapper[4702]: I1125 10:47:00.272236 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-wg9lr"
Nov 25 10:47:00 crc kubenswrapper[4702]: I1125 10:47:00.326392 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-wg9lr"
Nov 25 10:47:00 crc kubenswrapper[4702]: I1125 10:47:00.463201 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-46fx5"
Nov 25 10:47:00 crc kubenswrapper[4702]: I1125 10:47:00.464016 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-46fx5"
Nov 25 10:47:00 crc kubenswrapper[4702]: I1125 10:47:00.511815 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-46fx5"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.176110 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zcs6g" event={"ID":"de760b35-119f-4975-8eeb-76e8f9adb9f1","Type":"ContainerStarted","Data":"03247ff6e8c0d7878d3dd0d3e07c678dbcf64411ff7e81b07330c010096be7a7"}
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.183150 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h8j5p" event={"ID":"69f8b3df-cceb-485b-b985-7bdad0788aef","Type":"ContainerStarted","Data":"562e354a5586b396dfa7214776c25fc833960ae46681ea259d0fb8b8ce575fd0"}
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.190215 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxsrw" event={"ID":"a2a179ea-7517-4ae8-adb3-15ddc3b759fa","Type":"ContainerStarted","Data":"783d6bd458aec5722b89c608e56097944471206201e4df1cd031fbbda7dcfca3"}
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.194546 4702 generic.go:334] "Generic (PLEG): container finished" podID="79c9c319-b87f-4dae-9744-03ef948bf068" containerID="e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7" exitCode=0
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.195585 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hzg9" event={"ID":"79c9c319-b87f-4dae-9744-03ef948bf068","Type":"ContainerDied","Data":"e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7"}
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.210547 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zcs6g" podStartSLOduration=2.767399746 podStartE2EDuration="7.210530432s" podCreationTimestamp="2025-11-25 10:46:54 +0000 UTC" firstStartedPulling="2025-11-25 10:46:56.068080395 +0000 UTC m=+913.434676084" lastFinishedPulling="2025-11-25 10:47:00.511211081 +0000 UTC m=+917.877806770" observedRunningTime="2025-11-25 10:47:01.206588027 +0000 UTC m=+918.573183726" watchObservedRunningTime="2025-11-25 10:47:01.210530432 +0000 UTC m=+918.577126121"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.239196 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-knkf9" podStartSLOduration=4.091534327 podStartE2EDuration="9.239172267s" podCreationTimestamp="2025-11-25 10:46:52 +0000 UTC" firstStartedPulling="2025-11-25 10:46:54.052309329 +0000 UTC m=+911.418905018" lastFinishedPulling="2025-11-25 10:46:59.199947269 +0000 UTC m=+916.566542958" observedRunningTime="2025-11-25 10:47:01.230227896 +0000 UTC m=+918.596823585" watchObservedRunningTime="2025-11-25 10:47:01.239172267 +0000 UTC m=+918.605767956"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.257257 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-46fx5"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.257423 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-wg9lr"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.260560 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.265688 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.303213 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-jxsrw" podStartSLOduration=1.966983967 podStartE2EDuration="6.303192245s" podCreationTimestamp="2025-11-25 10:46:55 +0000 UTC" firstStartedPulling="2025-11-25 10:46:56.069531587 +0000 UTC m=+913.436127276" lastFinishedPulling="2025-11-25 10:47:00.405739865 +0000 UTC m=+917.772335554" observedRunningTime="2025-11-25 10:47:01.297828519 +0000 UTC m=+918.664424228" watchObservedRunningTime="2025-11-25 10:47:01.303192245 +0000 UTC m=+918.669787934"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.357493 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ffp8k"]
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.359003 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.376067 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffp8k"]
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.502701 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-catalog-content\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.502767 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-utilities\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.502825 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fsmdk\" (UniqueName: \"kubernetes.io/projected/73a2fc01-b4e5-413b-ba71-d37d5853d135-kube-api-access-fsmdk\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.605105 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-catalog-content\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.606132 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-catalog-content\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.606189 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-utilities\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.606218 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-utilities\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.606646 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fsmdk\" (UniqueName: \"kubernetes.io/projected/73a2fc01-b4e5-413b-ba71-d37d5853d135-kube-api-access-fsmdk\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.630329 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fsmdk\" (UniqueName: \"kubernetes.io/projected/73a2fc01-b4e5-413b-ba71-d37d5853d135-kube-api-access-fsmdk\") pod \"redhat-marketplace-ffp8k\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:01 crc kubenswrapper[4702]: I1125 10:47:01.680603 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffp8k"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.065149 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-gqzjk"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.065531 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-gqzjk"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.120518 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-gqzjk"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.160274 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffp8k"]
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.206423 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hzg9" event={"ID":"79c9c319-b87f-4dae-9744-03ef948bf068","Type":"ContainerStarted","Data":"d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531"}
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.207864 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffp8k" event={"ID":"73a2fc01-b4e5-413b-ba71-d37d5853d135","Type":"ContainerStarted","Data":"dddd42a4e444fe8d3e272b24af6b08fc6027b2df3c7eef34dc3af79b8ca08cd2"}
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.212339 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.283800 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-gqzjk"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.544626 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2x6vv"]
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.545946 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.556646 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2x6vv"]
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.629671 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pd5vv\" (UniqueName: \"kubernetes.io/projected/e57438f5-de09-4857-b5fc-e67b4c8c443d-kube-api-access-pd5vv\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.629751 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-catalog-content\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.629782 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-utilities\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.730783 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-catalog-content\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.730835 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-utilities\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.730889 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pd5vv\" (UniqueName: \"kubernetes.io/projected/e57438f5-de09-4857-b5fc-e67b4c8c443d-kube-api-access-pd5vv\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.731577 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-catalog-content\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.731804 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-utilities\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.754393 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pd5vv\" (UniqueName: \"kubernetes.io/projected/e57438f5-de09-4857-b5fc-e67b4c8c443d-kube-api-access-pd5vv\") pod \"certified-operators-2x6vv\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:02 crc kubenswrapper[4702]: I1125 10:47:02.861663 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:47:03 crc kubenswrapper[4702]: I1125 10:47:03.094799 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-knkf9"
Nov 25 10:47:03 crc kubenswrapper[4702]: I1125 10:47:03.096365 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-knkf9"
Nov 25 10:47:03 crc kubenswrapper[4702]: I1125 10:47:03.134843 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2x6vv"]
Nov 25 10:47:03 crc kubenswrapper[4702]: W1125 10:47:03.159863 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode57438f5_de09_4857_b5fc_e67b4c8c443d.slice/crio-e8f8439a54e2ebc88a9d8372bfcaa4739c039f4b98a62b9fee947407a1fd2142 WatchSource:0}: Error finding container e8f8439a54e2ebc88a9d8372bfcaa4739c039f4b98a62b9fee947407a1fd2142: Status 404 returned error can't find the container with id e8f8439a54e2ebc88a9d8372bfcaa4739c039f4b98a62b9fee947407a1fd2142
Nov 25 10:47:03 crc kubenswrapper[4702]: I1125 10:47:03.214256 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2x6vv" event={"ID":"e57438f5-de09-4857-b5fc-e67b4c8c443d","Type":"ContainerStarted","Data":"e8f8439a54e2ebc88a9d8372bfcaa4739c039f4b98a62b9fee947407a1fd2142"}
Nov 25 10:47:03 crc kubenswrapper[4702]: I1125 10:47:03.216607 4702 generic.go:334] "Generic (PLEG): container finished" podID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerID="562e354a5586b396dfa7214776c25fc833960ae46681ea259d0fb8b8ce575fd0" exitCode=0
Nov 25 10:47:03 crc kubenswrapper[4702]: I1125 10:47:03.216692 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h8j5p" event={"ID":"69f8b3df-cceb-485b-b985-7bdad0788aef","Type":"ContainerDied","Data":"562e354a5586b396dfa7214776c25fc833960ae46681ea259d0fb8b8ce575fd0"}
Nov 25 10:47:03 crc kubenswrapper[4702]: I1125 10:47:03.219490 4702 generic.go:334] "Generic (PLEG): container finished" podID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerID="a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5" exitCode=0
Nov 25 10:47:03 crc kubenswrapper[4702]: I1125 10:47:03.220157 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffp8k" event={"ID":"73a2fc01-b4e5-413b-ba71-d37d5853d135","Type":"ContainerDied","Data":"a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5"}
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.149816 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-knkf9" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="registry-server" probeResult="failure" output=<
Nov 25 10:47:04 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s
Nov 25 10:47:04 crc kubenswrapper[4702]: >
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.229001 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2x6vv" event={"ID":"e57438f5-de09-4857-b5fc-e67b4c8c443d","Type":"ContainerStarted","Data":"e2032e149fb4598772a532e45cd059217a797b1b3b6bca41f6a69ad3f64a5950"}
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.342446 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-p8jqc"]
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.343546 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.352174 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p8jqc"]
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.451832 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-utilities\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.465983 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-catalog-content\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.466108 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-76lr4\" (UniqueName: \"kubernetes.io/projected/ab9c17d4-6fbd-4439-85ff-80db813e03a8-kube-api-access-76lr4\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.566971 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-utilities\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.567049 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-catalog-content\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.567097 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-76lr4\" (UniqueName: \"kubernetes.io/projected/ab9c17d4-6fbd-4439-85ff-80db813e03a8-kube-api-access-76lr4\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.567575 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-utilities\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.567851 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-catalog-content\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.593689 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-76lr4\" (UniqueName: \"kubernetes.io/projected/ab9c17d4-6fbd-4439-85ff-80db813e03a8-kube-api-access-76lr4\") pod \"redhat-operators-p8jqc\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.660071 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8jqc"
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.945100 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-g5k2q"]
Nov 25 10:47:04 crc kubenswrapper[4702]: I1125 10:47:04.947827 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.009698 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5k2q"]
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.074138 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-utilities\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.074238 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-65kts\" (UniqueName: \"kubernetes.io/projected/243dc11a-4786-46eb-b000-2ccab5aeb028-kube-api-access-65kts\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.074413 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-catalog-content\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.083985 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-p8jqc"]
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.175465 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-catalog-content\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.175859 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-utilities\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.175890 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-65kts\" (UniqueName: \"kubernetes.io/projected/243dc11a-4786-46eb-b000-2ccab5aeb028-kube-api-access-65kts\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.176452 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-catalog-content\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.176527 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-utilities\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.194991 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-65kts\" (UniqueName: \"kubernetes.io/projected/243dc11a-4786-46eb-b000-2ccab5aeb028-kube-api-access-65kts\") pod \"redhat-marketplace-g5k2q\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.235640 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jqc" event={"ID":"ab9c17d4-6fbd-4439-85ff-80db813e03a8","Type":"ContainerStarted","Data":"1451674505b3a58a4891e4f82ba8ef635f5a660ce737753ec406e05d032b5c9e"}
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.269780 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5k2q"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.274789 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zcs6g"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.274957 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zcs6g"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.320039 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zcs6g"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.630724 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5k2q"]
Nov 25 10:47:05 crc kubenswrapper[4702]: W1125 10:47:05.639780 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod243dc11a_4786_46eb_b000_2ccab5aeb028.slice/crio-185076190e74405ac380afdd6f8d79230835b27e21365b09bfd2514803a27ed4 WatchSource:0}: Error finding container 185076190e74405ac380afdd6f8d79230835b27e21365b09bfd2514803a27ed4: Status 404 returned error can't find the container with id 185076190e74405ac380afdd6f8d79230835b27e21365b09bfd2514803a27ed4
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.666185 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-jxsrw"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.666254 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-jxsrw"
Nov 25 10:47:05 crc kubenswrapper[4702]: I1125 10:47:05.714077 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-jxsrw"
Nov 25 10:47:06 crc kubenswrapper[4702]: I1125 10:47:06.243930 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5k2q" event={"ID":"243dc11a-4786-46eb-b000-2ccab5aeb028","Type":"ContainerStarted","Data":"4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d"}
Nov 25 10:47:06 crc kubenswrapper[4702]: I1125 10:47:06.244278 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5k2q" event={"ID":"243dc11a-4786-46eb-b000-2ccab5aeb028","Type":"ContainerStarted","Data":"185076190e74405ac380afdd6f8d79230835b27e21365b09bfd2514803a27ed4"}
Nov 25 10:47:06 crc kubenswrapper[4702]: I1125 10:47:06.245965 4702 generic.go:334] "Generic (PLEG): container finished" podID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerID="d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174" exitCode=0
Nov 25 10:47:06 crc kubenswrapper[4702]: I1125 10:47:06.246054 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jqc" event={"ID":"ab9c17d4-6fbd-4439-85ff-80db813e03a8","Type":"ContainerDied","Data":"d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174"}
Nov 25 10:47:06 crc kubenswrapper[4702]: I1125 10:47:06.293079 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-jxsrw"
Nov 25 10:47:06 crc kubenswrapper[4702]: I1125 10:47:06.298587 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zcs6g"
Nov 25 10:47:07 crc kubenswrapper[4702]: I1125 10:47:07.253131 4702 generic.go:334] "Generic (PLEG): container finished" podID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerID="4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d" exitCode=0
Nov 25 10:47:07 crc kubenswrapper[4702]: I1125 10:47:07.254717 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5k2q" event={"ID":"243dc11a-4786-46eb-b000-2ccab5aeb028","Type":"ContainerDied","Data":"4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d"}
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.150067 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bm7kf"]
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.151267 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.159637 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bm7kf"]
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.260935 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h8j5p" event={"ID":"69f8b3df-cceb-485b-b985-7bdad0788aef","Type":"ContainerStarted","Data":"fd8f6e4ff16f28dbaeeb5cfe7a68bf8676872b8db7a5e443ec70d338b7298c8c"}
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.262793 4702 generic.go:334] "Generic (PLEG): container finished" podID="79c9c319-b87f-4dae-9744-03ef948bf068" containerID="d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531" exitCode=0
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.262865 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hzg9" event={"ID":"79c9c319-b87f-4dae-9744-03ef948bf068","Type":"ContainerDied","Data":"d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531"}
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.315357 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-catalog-content\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.315668 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d46tg\" (UniqueName: \"kubernetes.io/projected/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-kube-api-access-d46tg\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.315720 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-utilities\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.347784 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-hlfr2"]
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.349230 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.359538 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hlfr2"]
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.417244 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-utilities\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.417330 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-catalog-content\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.417354 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d46tg\" (UniqueName: \"kubernetes.io/projected/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-kube-api-access-d46tg\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.417398 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-utilities\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.417488 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hs8hm\" (UniqueName: \"kubernetes.io/projected/01585b85-8743-46df-bf57-28b9c7101515-kube-api-access-hs8hm\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.417512 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-catalog-content\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.419458 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-utilities\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.420607 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-catalog-content\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.448049 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d46tg\" (UniqueName: \"kubernetes.io/projected/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-kube-api-access-d46tg\") pod \"certified-operators-bm7kf\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.475458 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bm7kf"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.518999 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hs8hm\" (UniqueName: \"kubernetes.io/projected/01585b85-8743-46df-bf57-28b9c7101515-kube-api-access-hs8hm\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.519097 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-utilities\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.519135 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-catalog-content\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.519671 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-utilities\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.519797 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-catalog-content\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.540877 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hs8hm\" (UniqueName: \"kubernetes.io/projected/01585b85-8743-46df-bf57-28b9c7101515-kube-api-access-hs8hm\") pod \"redhat-operators-hlfr2\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:08 crc kubenswrapper[4702]: I1125 10:47:08.665773 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hlfr2"
Nov 25 10:47:09 crc kubenswrapper[4702]: I1125 10:47:09.278759 4702 generic.go:334] "Generic (PLEG): container finished" podID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerID="e2032e149fb4598772a532e45cd059217a797b1b3b6bca41f6a69ad3f64a5950" exitCode=0
Nov 25 10:47:09 crc kubenswrapper[4702]: I1125 10:47:09.278822 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2x6vv" event={"ID":"e57438f5-de09-4857-b5fc-e67b4c8c443d","Type":"ContainerDied","Data":"e2032e149fb4598772a532e45cd059217a797b1b3b6bca41f6a69ad3f64a5950"}
Nov 25 10:47:09 crc kubenswrapper[4702]: I1125 10:47:09.301885 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-h8j5p" podStartSLOduration=4.339708367 podStartE2EDuration="13.301862036s" podCreationTimestamp="2025-11-25 10:46:56 +0000 UTC" firstStartedPulling="2025-11-25 10:46:58.134665101 +0000 UTC m=+915.501260790" lastFinishedPulling="2025-11-25 10:47:07.09681877 +0000 UTC m=+924.463414459" observedRunningTime="2025-11-25 10:47:09.29994259 +0000 UTC m=+926.666538289" watchObservedRunningTime="2025-11-25 10:47:09.301862036 +0000 UTC m=+926.668457725"
Nov 25 10:47:09 crc kubenswrapper[4702]: I1125 10:47:09.823710 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-hlfr2"]
Nov 25 10:47:09 crc kubenswrapper[4702]: I1125 10:47:09.999709 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bm7kf"]
Nov 25 10:47:10 crc kubenswrapper[4702]: W1125 10:47:10.015376 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda7287d27_2a96_42f3_b8c5_1ca79d5c422a.slice/crio-8cecf1eb557baae88dd7b97618660ea11ff85eea5bf4b8f293380f1f4a6828c2 WatchSource:0}: Error finding container 8cecf1eb557baae88dd7b97618660ea11ff85eea5bf4b8f293380f1f4a6828c2: Status 404 returned error can't find the container with id 8cecf1eb557baae88dd7b97618660ea11ff85eea5bf4b8f293380f1f4a6828c2
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.286547 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlfr2" event={"ID":"01585b85-8743-46df-bf57-28b9c7101515","Type":"ContainerStarted","Data":"2182f112f372e03b18f3d486238265ec161dc6dec68a99f6a5ab4ef0ecf5dca2"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.286598 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlfr2" event={"ID":"01585b85-8743-46df-bf57-28b9c7101515","Type":"ContainerStarted","Data":"2052cb42258f34325d97cc8217f93ce703e02ae44cf5e0faec400db1b7719209"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.290431 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2x6vv" event={"ID":"e57438f5-de09-4857-b5fc-e67b4c8c443d","Type":"ContainerStarted","Data":"a08d5d8d8fab0ae910786624af8f46cfbdfe6c48ca29a3102c0395ca243f275f"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.298599 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5k2q" event={"ID":"243dc11a-4786-46eb-b000-2ccab5aeb028","Type":"ContainerStarted","Data":"204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.307019 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hzg9" event={"ID":"79c9c319-b87f-4dae-9744-03ef948bf068","Type":"ContainerStarted","Data":"d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.315630 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bm7kf" event={"ID":"a7287d27-2a96-42f3-b8c5-1ca79d5c422a","Type":"ContainerStarted","Data":"60ab00391af19ab4af6406c3e0a1d237f8e87dbeb6fb707b0bbbe519675c259b"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.315688 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bm7kf" event={"ID":"a7287d27-2a96-42f3-b8c5-1ca79d5c422a","Type":"ContainerStarted","Data":"8cecf1eb557baae88dd7b97618660ea11ff85eea5bf4b8f293380f1f4a6828c2"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.318446 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jqc" event={"ID":"ab9c17d4-6fbd-4439-85ff-80db813e03a8","Type":"ContainerStarted","Data":"4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.334322 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffp8k" event={"ID":"73a2fc01-b4e5-413b-ba71-d37d5853d135","Type":"ContainerStarted","Data":"8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3"}
Nov 25 10:47:10 crc kubenswrapper[4702]: I1125 10:47:10.440734 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5hzg9" podStartSLOduration=3.759327832 podStartE2EDuration="12.440710579s" podCreationTimestamp="2025-11-25 10:46:58 +0000 UTC" firstStartedPulling="2025-11-25 10:47:01.200683085 +0000 UTC m=+918.567278774" lastFinishedPulling="2025-11-25 10:47:09.882065832 +0000 UTC m=+927.248661521" observedRunningTime="2025-11-25 10:47:10.436101095 +0000 UTC m=+927.802696814" watchObservedRunningTime="2025-11-25 10:47:10.440710579 +0000 UTC m=+927.807306268"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.343528 4702 generic.go:334] "Generic (PLEG): container finished" podID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerID="4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519" exitCode=0
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.343566 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jqc" event={"ID":"ab9c17d4-6fbd-4439-85ff-80db813e03a8","Type":"ContainerDied","Data":"4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519"}
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.749847 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-x8rxw"]
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.751346 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.762563 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8rxw"]
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.874674 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-utilities\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.875021 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c2n8k\" (UniqueName: \"kubernetes.io/projected/e64ea1d5-d260-4331-bc5d-800fd8248ff7-kube-api-access-c2n8k\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.875046 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-catalog-content\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.946156 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-2dxxh"]
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.947573 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dxxh"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.954349 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dxxh"]
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.976457 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-catalog-content\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.976580 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-utilities\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.976617 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c2n8k\" (UniqueName: \"kubernetes.io/projected/e64ea1d5-d260-4331-bc5d-800fd8248ff7-kube-api-access-c2n8k\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.977003 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-catalog-content\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:11 crc kubenswrapper[4702]: I1125 10:47:11.977164 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-utilities\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.015008 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c2n8k\" (UniqueName: \"kubernetes.io/projected/e64ea1d5-d260-4331-bc5d-800fd8248ff7-kube-api-access-c2n8k\") pod \"redhat-marketplace-x8rxw\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.069328 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8rxw"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.078048 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-catalog-content\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.078258 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-utilities\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.078365 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6lpk2\" (UniqueName: \"kubernetes.io/projected/615b8e09-5a50-4af4-89dd-31fb6282baea-kube-api-access-6lpk2\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.179729 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-utilities\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.179814 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6lpk2\" (UniqueName: \"kubernetes.io/projected/615b8e09-5a50-4af4-89dd-31fb6282baea-kube-api-access-6lpk2\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.179863 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-catalog-content\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh"
Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.180535 4702 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-catalog-content\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.180748 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-utilities\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.210051 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6lpk2\" (UniqueName: \"kubernetes.io/projected/615b8e09-5a50-4af4-89dd-31fb6282baea-kube-api-access-6lpk2\") pod \"certified-operators-2dxxh\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.266662 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.353943 4702 generic.go:334] "Generic (PLEG): container finished" podID="01585b85-8743-46df-bf57-28b9c7101515" containerID="2182f112f372e03b18f3d486238265ec161dc6dec68a99f6a5ab4ef0ecf5dca2" exitCode=0 Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.354030 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlfr2" event={"ID":"01585b85-8743-46df-bf57-28b9c7101515","Type":"ContainerDied","Data":"2182f112f372e03b18f3d486238265ec161dc6dec68a99f6a5ab4ef0ecf5dca2"} Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.355611 4702 generic.go:334] "Generic (PLEG): container finished" podID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerID="204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5" exitCode=0 Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.355667 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5k2q" event={"ID":"243dc11a-4786-46eb-b000-2ccab5aeb028","Type":"ContainerDied","Data":"204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5"} Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.363118 4702 generic.go:334] "Generic (PLEG): container finished" podID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerID="8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3" exitCode=0 Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.363174 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffp8k" event={"ID":"73a2fc01-b4e5-413b-ba71-d37d5853d135","Type":"ContainerDied","Data":"8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3"} Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.612578 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-2dxxh"] Nov 25 10:47:12 crc kubenswrapper[4702]: I1125 10:47:12.649267 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8rxw"] Nov 25 10:47:12 crc kubenswrapper[4702]: W1125 10:47:12.656062 4702 manager.go:1169] Failed to process watch event {EventType:0 
Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode64ea1d5_d260_4331_bc5d_800fd8248ff7.slice/crio-4d009bfe5ec394c8ffe6f1299352b541c461d952882348b0f2f7f9c6e7b9d015 WatchSource:0}: Error finding container 4d009bfe5ec394c8ffe6f1299352b541c461d952882348b0f2f7f9c6e7b9d015: Status 404 returned error can't find the container with id 4d009bfe5ec394c8ffe6f1299352b541c461d952882348b0f2f7f9c6e7b9d015 Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.137717 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.186885 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.371604 4702 generic.go:334] "Generic (PLEG): container finished" podID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerID="8d4133e070099b322dd6280824c5cb2c572631b32a6396a1a9f886f5d09c184e" exitCode=0 Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.371686 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8rxw" event={"ID":"e64ea1d5-d260-4331-bc5d-800fd8248ff7","Type":"ContainerDied","Data":"8d4133e070099b322dd6280824c5cb2c572631b32a6396a1a9f886f5d09c184e"} Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.371753 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8rxw" event={"ID":"e64ea1d5-d260-4331-bc5d-800fd8248ff7","Type":"ContainerStarted","Data":"4d009bfe5ec394c8ffe6f1299352b541c461d952882348b0f2f7f9c6e7b9d015"} Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.373766 4702 generic.go:334] "Generic (PLEG): container finished" podID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerID="a08d5d8d8fab0ae910786624af8f46cfbdfe6c48ca29a3102c0395ca243f275f" exitCode=0 Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.373819 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2x6vv" event={"ID":"e57438f5-de09-4857-b5fc-e67b4c8c443d","Type":"ContainerDied","Data":"a08d5d8d8fab0ae910786624af8f46cfbdfe6c48ca29a3102c0395ca243f275f"} Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.375783 4702 generic.go:334] "Generic (PLEG): container finished" podID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerID="60ab00391af19ab4af6406c3e0a1d237f8e87dbeb6fb707b0bbbe519675c259b" exitCode=0 Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.375871 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bm7kf" event={"ID":"a7287d27-2a96-42f3-b8c5-1ca79d5c422a","Type":"ContainerDied","Data":"60ab00391af19ab4af6406c3e0a1d237f8e87dbeb6fb707b0bbbe519675c259b"} Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.378857 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dxxh" event={"ID":"615b8e09-5a50-4af4-89dd-31fb6282baea","Type":"ContainerStarted","Data":"2d9a65d40c08092e2c4483e6a797f055140780b63a8959129157e9b1ffbc01ca"} Nov 25 10:47:13 crc kubenswrapper[4702]: I1125 10:47:13.378914 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dxxh" event={"ID":"615b8e09-5a50-4af4-89dd-31fb6282baea","Type":"ContainerStarted","Data":"1dc4fe0b73f92d512d1acff1eef1a8c52c10b55f452a012750814bace1b1bdf4"} Nov 25 10:47:14 crc kubenswrapper[4702]: I1125 10:47:14.387268 
4702 generic.go:334] "Generic (PLEG): container finished" podID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerID="2d9a65d40c08092e2c4483e6a797f055140780b63a8959129157e9b1ffbc01ca" exitCode=0 Nov 25 10:47:14 crc kubenswrapper[4702]: I1125 10:47:14.387670 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dxxh" event={"ID":"615b8e09-5a50-4af4-89dd-31fb6282baea","Type":"ContainerDied","Data":"2d9a65d40c08092e2c4483e6a797f055140780b63a8959129157e9b1ffbc01ca"} Nov 25 10:47:14 crc kubenswrapper[4702]: I1125 10:47:14.394578 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jqc" event={"ID":"ab9c17d4-6fbd-4439-85ff-80db813e03a8","Type":"ContainerStarted","Data":"75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96"} Nov 25 10:47:14 crc kubenswrapper[4702]: I1125 10:47:14.446965 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-p8jqc" podStartSLOduration=3.670131973 podStartE2EDuration="10.446941878s" podCreationTimestamp="2025-11-25 10:47:04 +0000 UTC" firstStartedPulling="2025-11-25 10:47:07.256642872 +0000 UTC m=+924.623238561" lastFinishedPulling="2025-11-25 10:47:14.033452777 +0000 UTC m=+931.400048466" observedRunningTime="2025-11-25 10:47:14.432330802 +0000 UTC m=+931.798926501" watchObservedRunningTime="2025-11-25 10:47:14.446941878 +0000 UTC m=+931.813537577" Nov 25 10:47:14 crc kubenswrapper[4702]: I1125 10:47:14.660961 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-p8jqc" Nov 25 10:47:14 crc kubenswrapper[4702]: I1125 10:47:14.661037 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-p8jqc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.350526 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-wrrrc"] Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.352409 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.368242 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wrrrc"] Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.420950 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffp8k" event={"ID":"73a2fc01-b4e5-413b-ba71-d37d5853d135","Type":"ContainerStarted","Data":"5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be"} Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.422816 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlfr2" event={"ID":"01585b85-8743-46df-bf57-28b9c7101515","Type":"ContainerStarted","Data":"89f2d6b06896e0358771ba2495015c0710d38a6168a7fb156a516fb2c5796c2c"} Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.425070 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2x6vv" event={"ID":"e57438f5-de09-4857-b5fc-e67b4c8c443d","Type":"ContainerStarted","Data":"bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06"} Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.427202 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5k2q" event={"ID":"243dc11a-4786-46eb-b000-2ccab5aeb028","Type":"ContainerStarted","Data":"588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197"} Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.443197 4702 generic.go:334] "Generic (PLEG): container finished" podID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerID="0f09617b9671c1b0fa766ff2b41932af62567280081cfce2590adf120e7a71d8" exitCode=0 Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.443972 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bm7kf" event={"ID":"a7287d27-2a96-42f3-b8c5-1ca79d5c422a","Type":"ContainerDied","Data":"0f09617b9671c1b0fa766ff2b41932af62567280081cfce2590adf120e7a71d8"} Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.458316 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ffp8k" podStartSLOduration=3.400384977 podStartE2EDuration="14.458296241s" podCreationTimestamp="2025-11-25 10:47:01 +0000 UTC" firstStartedPulling="2025-11-25 10:47:03.22193317 +0000 UTC m=+920.588528859" lastFinishedPulling="2025-11-25 10:47:14.279844434 +0000 UTC m=+931.646440123" observedRunningTime="2025-11-25 10:47:15.454982444 +0000 UTC m=+932.821578153" watchObservedRunningTime="2025-11-25 10:47:15.458296241 +0000 UTC m=+932.824891930" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.484683 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2x6vv" podStartSLOduration=8.720173739 podStartE2EDuration="13.48466583s" podCreationTimestamp="2025-11-25 10:47:02 +0000 UTC" firstStartedPulling="2025-11-25 10:47:09.387260177 +0000 UTC m=+926.753855866" lastFinishedPulling="2025-11-25 10:47:14.151752268 +0000 UTC m=+931.518347957" observedRunningTime="2025-11-25 10:47:15.482205498 +0000 UTC m=+932.848801197" watchObservedRunningTime="2025-11-25 10:47:15.48466583 +0000 UTC m=+932.851261519" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.540204 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-catalog-content\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.540314 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tdss8\" (UniqueName: \"kubernetes.io/projected/57ac8391-54bc-457b-a6fe-8f4e761f53d1-kube-api-access-tdss8\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.540383 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-utilities\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.548362 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-g5k2q" podStartSLOduration=4.384972148 podStartE2EDuration="11.548333538s" podCreationTimestamp="2025-11-25 10:47:04 +0000 UTC" firstStartedPulling="2025-11-25 10:47:07.256435956 +0000 UTC m=+924.623031645" lastFinishedPulling="2025-11-25 10:47:14.419797346 +0000 UTC m=+931.786393035" observedRunningTime="2025-11-25 10:47:15.536226224 +0000 UTC m=+932.902821923" watchObservedRunningTime="2025-11-25 10:47:15.548333538 +0000 UTC m=+932.914929247" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.551339 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wzfg8"] Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.552820 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.571869 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzfg8"] Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.642246 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-catalog-content\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.643113 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tdss8\" (UniqueName: \"kubernetes.io/projected/57ac8391-54bc-457b-a6fe-8f4e761f53d1-kube-api-access-tdss8\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.643792 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-utilities\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.642894 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-catalog-content\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.644236 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-utilities\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.672574 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tdss8\" (UniqueName: \"kubernetes.io/projected/57ac8391-54bc-457b-a6fe-8f4e761f53d1-kube-api-access-tdss8\") pod \"redhat-operators-wrrrc\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.713445 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-p8jqc" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="registry-server" probeResult="failure" output=< Nov 25 10:47:15 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s Nov 25 10:47:15 crc kubenswrapper[4702]: > Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.744722 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pwmvk\" (UniqueName: \"kubernetes.io/projected/d3c5667b-d960-49fb-b84c-cc17236f96f3-kube-api-access-pwmvk\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.744876 4702 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-catalog-content\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.745009 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-utilities\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.845927 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pwmvk\" (UniqueName: \"kubernetes.io/projected/d3c5667b-d960-49fb-b84c-cc17236f96f3-kube-api-access-pwmvk\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.846025 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-catalog-content\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.846077 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-utilities\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.846654 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-utilities\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.846700 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-catalog-content\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.867730 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pwmvk\" (UniqueName: \"kubernetes.io/projected/d3c5667b-d960-49fb-b84c-cc17236f96f3-kube-api-access-pwmvk\") pod \"redhat-marketplace-wzfg8\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.876478 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:15 crc kubenswrapper[4702]: I1125 10:47:15.968151 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.475290 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8rxw" event={"ID":"e64ea1d5-d260-4331-bc5d-800fd8248ff7","Type":"ContainerStarted","Data":"f3925226d9a8916e34a32cb423b6c9e27090073e2c1cbd6fe75098656536c3f3"} Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.477887 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dxxh" event={"ID":"615b8e09-5a50-4af4-89dd-31fb6282baea","Type":"ContainerStarted","Data":"911d439255c9b674d4d8075b0159f31aea5ccff302dd15cfda8f6e2d0e9bb31b"} Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.651357 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq"] Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.653125 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.656129 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.680894 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq"] Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.701370 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.701540 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.701614 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4cp2t\" (UniqueName: \"kubernetes.io/projected/a065282b-3c41-4b07-93b3-e29e2502e89a-kube-api-access-4cp2t\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.803295 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4cp2t\" (UniqueName: \"kubernetes.io/projected/a065282b-3c41-4b07-93b3-e29e2502e89a-kube-api-access-4cp2t\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.803365 4702 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.803440 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.804058 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-bundle\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.804127 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-util\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.831158 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4cp2t\" (UniqueName: \"kubernetes.io/projected/a065282b-3c41-4b07-93b3-e29e2502e89a-kube-api-access-4cp2t\") pod \"e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.837745 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzfg8"] Nov 25 10:47:16 crc kubenswrapper[4702]: W1125 10:47:16.852438 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd3c5667b_d960_49fb_b84c_cc17236f96f3.slice/crio-a0790dc1bf7f5ccfc28c0df08698bc13c3813e4bc6590fc11ead99c4a85ac576 WatchSource:0}: Error finding container a0790dc1bf7f5ccfc28c0df08698bc13c3813e4bc6590fc11ead99c4a85ac576: Status 404 returned error can't find the container with id a0790dc1bf7f5ccfc28c0df08698bc13c3813e4bc6590fc11ead99c4a85ac576 Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.914788 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-wrrrc"] Nov 25 10:47:16 crc kubenswrapper[4702]: W1125 10:47:16.916645 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod57ac8391_54bc_457b_a6fe_8f4e761f53d1.slice/crio-c78f10548d460922242d5cc9986603562aa3bf5add9a772a059a8009cbdd4596 WatchSource:0}: Error finding container c78f10548d460922242d5cc9986603562aa3bf5add9a772a059a8009cbdd4596: Status 404 returned error can't find the container 
with id c78f10548d460922242d5cc9986603562aa3bf5add9a772a059a8009cbdd4596 Nov 25 10:47:16 crc kubenswrapper[4702]: I1125 10:47:16.978032 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.217417 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq"] Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.263445 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-h8j5p" Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.263515 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-h8j5p" Nov 25 10:47:17 crc kubenswrapper[4702]: W1125 10:47:17.299718 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda065282b_3c41_4b07_93b3_e29e2502e89a.slice/crio-15c0794b06b744917e28e5b7702630378cf9cdbbe610cb37125da39cb308c4e3 WatchSource:0}: Error finding container 15c0794b06b744917e28e5b7702630378cf9cdbbe610cb37125da39cb308c4e3: Status 404 returned error can't find the container with id 15c0794b06b744917e28e5b7702630378cf9cdbbe610cb37125da39cb308c4e3 Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.312499 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-h8j5p" Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.510112 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bm7kf" event={"ID":"a7287d27-2a96-42f3-b8c5-1ca79d5c422a","Type":"ContainerStarted","Data":"236243b8156d04bd328f673aca75baf2c2740114f60068f5746ed9b74f85ad15"} Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.532886 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzfg8" event={"ID":"d3c5667b-d960-49fb-b84c-cc17236f96f3","Type":"ContainerStarted","Data":"15a02248c7db9a98d1211202b96246d97b09aafaeee81f36f03320c259aeefcf"} Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.532959 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzfg8" event={"ID":"d3c5667b-d960-49fb-b84c-cc17236f96f3","Type":"ContainerStarted","Data":"a0790dc1bf7f5ccfc28c0df08698bc13c3813e4bc6590fc11ead99c4a85ac576"} Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.534933 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrrc" event={"ID":"57ac8391-54bc-457b-a6fe-8f4e761f53d1","Type":"ContainerStarted","Data":"e158bb1f510a144c4ea5bd46b430711bea5419ff4d7a1a99ee18ae5090af79f7"} Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.534973 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrrc" event={"ID":"57ac8391-54bc-457b-a6fe-8f4e761f53d1","Type":"ContainerStarted","Data":"c78f10548d460922242d5cc9986603562aa3bf5add9a772a059a8009cbdd4596"} Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.537010 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" 
event={"ID":"a065282b-3c41-4b07-93b3-e29e2502e89a","Type":"ContainerStarted","Data":"1b7fd2b8ed57fd75c6f22dfd0c94e8c1fdf84eabf8a165b4689f0edbb43f1b42"} Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.537038 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" event={"ID":"a065282b-3c41-4b07-93b3-e29e2502e89a","Type":"ContainerStarted","Data":"15c0794b06b744917e28e5b7702630378cf9cdbbe610cb37125da39cb308c4e3"} Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.548812 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bm7kf" podStartSLOduration=6.192392894 podStartE2EDuration="9.548790146s" podCreationTimestamp="2025-11-25 10:47:08 +0000 UTC" firstStartedPulling="2025-11-25 10:47:13.376860203 +0000 UTC m=+930.743455892" lastFinishedPulling="2025-11-25 10:47:16.733257455 +0000 UTC m=+934.099853144" observedRunningTime="2025-11-25 10:47:17.541466082 +0000 UTC m=+934.908061781" watchObservedRunningTime="2025-11-25 10:47:17.548790146 +0000 UTC m=+934.915385835" Nov 25 10:47:17 crc kubenswrapper[4702]: I1125 10:47:17.599042 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-h8j5p" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.476358 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bm7kf" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.476463 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bm7kf" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.745356 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-bhnmp"] Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.747863 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.759739 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bhnmp"] Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.836464 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4r52s\" (UniqueName: \"kubernetes.io/projected/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-kube-api-access-4r52s\") pod \"certified-operators-bhnmp\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.836545 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-catalog-content\") pod \"certified-operators-bhnmp\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.836582 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-utilities\") pod \"certified-operators-bhnmp\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.861989 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5hzg9" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.862025 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5hzg9" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.904694 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5hzg9" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.938084 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-catalog-content\") pod \"certified-operators-bhnmp\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.938152 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-utilities\") pod \"certified-operators-bhnmp\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.938221 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4r52s\" (UniqueName: \"kubernetes.io/projected/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-kube-api-access-4r52s\") pod \"certified-operators-bhnmp\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.939081 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-catalog-content\") pod \"certified-operators-bhnmp\" (UID: 
\"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.939382 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-utilities\") pod \"certified-operators-bhnmp\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.949406 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-kd7rs"] Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.952879 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.955408 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kd7rs"] Nov 25 10:47:18 crc kubenswrapper[4702]: I1125 10:47:18.957143 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4r52s\" (UniqueName: \"kubernetes.io/projected/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-kube-api-access-4r52s\") pod \"certified-operators-bhnmp\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.039938 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-catalog-content\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.040276 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9sb4f\" (UniqueName: \"kubernetes.io/projected/62348515-75a4-4328-beb2-9e7df5e23fc3-kube-api-access-9sb4f\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.040395 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-utilities\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.067373 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.145258 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-catalog-content\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.145342 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9sb4f\" (UniqueName: \"kubernetes.io/projected/62348515-75a4-4328-beb2-9e7df5e23fc3-kube-api-access-9sb4f\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.145383 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-utilities\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.146309 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-utilities\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.146353 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-catalog-content\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.167678 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9sb4f\" (UniqueName: \"kubernetes.io/projected/62348515-75a4-4328-beb2-9e7df5e23fc3-kube-api-access-9sb4f\") pod \"redhat-operators-kd7rs\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.421447 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.478974 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-bhnmp"] Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.523410 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-bm7kf" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="registry-server" probeResult="failure" output=< Nov 25 10:47:19 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s Nov 25 10:47:19 crc kubenswrapper[4702]: > Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.557461 4702 generic.go:334] "Generic (PLEG): container finished" podID="01585b85-8743-46df-bf57-28b9c7101515" containerID="89f2d6b06896e0358771ba2495015c0710d38a6168a7fb156a516fb2c5796c2c" exitCode=0 Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.557546 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlfr2" event={"ID":"01585b85-8743-46df-bf57-28b9c7101515","Type":"ContainerDied","Data":"89f2d6b06896e0358771ba2495015c0710d38a6168a7fb156a516fb2c5796c2c"} Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.561230 4702 generic.go:334] "Generic (PLEG): container finished" podID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerID="911d439255c9b674d4d8075b0159f31aea5ccff302dd15cfda8f6e2d0e9bb31b" exitCode=0 Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.561298 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dxxh" event={"ID":"615b8e09-5a50-4af4-89dd-31fb6282baea","Type":"ContainerDied","Data":"911d439255c9b674d4d8075b0159f31aea5ccff302dd15cfda8f6e2d0e9bb31b"} Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.566371 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhnmp" event={"ID":"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9","Type":"ContainerStarted","Data":"af4a81384ce5f6a82966d479328a1989f4787122f158e5c562c8130ad3f9216e"} Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.571304 4702 generic.go:334] "Generic (PLEG): container finished" podID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerID="f3925226d9a8916e34a32cb423b6c9e27090073e2c1cbd6fe75098656536c3f3" exitCode=0 Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.572209 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8rxw" event={"ID":"e64ea1d5-d260-4331-bc5d-800fd8248ff7","Type":"ContainerDied","Data":"f3925226d9a8916e34a32cb423b6c9e27090073e2c1cbd6fe75098656536c3f3"} Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.629568 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5hzg9" Nov 25 10:47:19 crc kubenswrapper[4702]: I1125 10:47:19.713726 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-kd7rs"] Nov 25 10:47:20 crc kubenswrapper[4702]: I1125 10:47:20.579687 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kd7rs" event={"ID":"62348515-75a4-4328-beb2-9e7df5e23fc3","Type":"ContainerStarted","Data":"0e1c96d92b5c6d7ef18434af3f6b0259a2e960b276391c966c326d2a3d4a7fe3"} Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.143551 4702 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/redhat-marketplace-ggkwj"] Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.144869 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.167199 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggkwj"] Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.278295 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-utilities\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.278410 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2fxvj\" (UniqueName: \"kubernetes.io/projected/bf090575-6d44-4e0b-9522-cb864bb8169b-kube-api-access-2fxvj\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.278435 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-catalog-content\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.351528 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-9ps98"] Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.354517 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.362949 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9ps98"] Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.379750 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-utilities\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.380169 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2fxvj\" (UniqueName: \"kubernetes.io/projected/bf090575-6d44-4e0b-9522-cb864bb8169b-kube-api-access-2fxvj\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.380325 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-catalog-content\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.381123 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-catalog-content\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.381557 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-utilities\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.401660 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2fxvj\" (UniqueName: \"kubernetes.io/projected/bf090575-6d44-4e0b-9522-cb864bb8169b-kube-api-access-2fxvj\") pod \"redhat-marketplace-ggkwj\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.474634 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.481758 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9dz9n\" (UniqueName: \"kubernetes.io/projected/99bc87a4-5a14-4179-9e8b-4a49298b6f78-kube-api-access-9dz9n\") pod \"redhat-operators-9ps98\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.481813 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-catalog-content\") pod \"redhat-operators-9ps98\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.481978 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-utilities\") pod \"redhat-operators-9ps98\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.585801 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9dz9n\" (UniqueName: \"kubernetes.io/projected/99bc87a4-5a14-4179-9e8b-4a49298b6f78-kube-api-access-9dz9n\") pod \"redhat-operators-9ps98\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.585866 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-catalog-content\") pod \"redhat-operators-9ps98\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.585986 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-utilities\") pod \"redhat-operators-9ps98\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.587072 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-utilities\") pod \"redhat-operators-9ps98\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.587274 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-catalog-content\") pod \"redhat-operators-9ps98\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.628349 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9dz9n\" (UniqueName: \"kubernetes.io/projected/99bc87a4-5a14-4179-9e8b-4a49298b6f78-kube-api-access-9dz9n\") pod \"redhat-operators-9ps98\" (UID: 
\"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.671117 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.681706 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ffp8k" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.683570 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ffp8k" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.735574 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ffp8k" Nov 25 10:47:21 crc kubenswrapper[4702]: I1125 10:47:21.888199 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggkwj"] Nov 25 10:47:21 crc kubenswrapper[4702]: W1125 10:47:21.889106 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podbf090575_6d44_4e0b_9522_cb864bb8169b.slice/crio-5d260fe2c9e6288c64bb9221d20254a5a93203a7ec10ad5a9ab53a4df5b2ebcd WatchSource:0}: Error finding container 5d260fe2c9e6288c64bb9221d20254a5a93203a7ec10ad5a9ab53a4df5b2ebcd: Status 404 returned error can't find the container with id 5d260fe2c9e6288c64bb9221d20254a5a93203a7ec10ad5a9ab53a4df5b2ebcd Nov 25 10:47:22 crc kubenswrapper[4702]: I1125 10:47:22.087189 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-9ps98"] Nov 25 10:47:22 crc kubenswrapper[4702]: W1125 10:47:22.092274 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99bc87a4_5a14_4179_9e8b_4a49298b6f78.slice/crio-929e3cad31166c5b8255ef2a76eebd03219030517cc5de59fd3c5dee687bef27 WatchSource:0}: Error finding container 929e3cad31166c5b8255ef2a76eebd03219030517cc5de59fd3c5dee687bef27: Status 404 returned error can't find the container with id 929e3cad31166c5b8255ef2a76eebd03219030517cc5de59fd3c5dee687bef27 Nov 25 10:47:22 crc kubenswrapper[4702]: I1125 10:47:22.595210 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kd7rs" event={"ID":"62348515-75a4-4328-beb2-9e7df5e23fc3","Type":"ContainerStarted","Data":"5b1f3446f2b17df3c21c4f1e77021594765bafddeec205c46bfbfdb4950e2e78"} Nov 25 10:47:22 crc kubenswrapper[4702]: I1125 10:47:22.596363 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggkwj" event={"ID":"bf090575-6d44-4e0b-9522-cb864bb8169b","Type":"ContainerStarted","Data":"5d260fe2c9e6288c64bb9221d20254a5a93203a7ec10ad5a9ab53a4df5b2ebcd"} Nov 25 10:47:22 crc kubenswrapper[4702]: I1125 10:47:22.597403 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ps98" event={"ID":"99bc87a4-5a14-4179-9e8b-4a49298b6f78","Type":"ContainerStarted","Data":"929e3cad31166c5b8255ef2a76eebd03219030517cc5de59fd3c5dee687bef27"} Nov 25 10:47:22 crc kubenswrapper[4702]: I1125 10:47:22.641868 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ffp8k" Nov 25 10:47:22 crc kubenswrapper[4702]: I1125 10:47:22.862312 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-2x6vv" Nov 25 10:47:22 crc kubenswrapper[4702]: I1125 10:47:22.862374 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2x6vv" Nov 25 10:47:22 crc kubenswrapper[4702]: I1125 10:47:22.903721 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2x6vv" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.547130 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mm67q"] Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.548650 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.560444 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mm67q"] Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.604041 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhnmp" event={"ID":"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9","Type":"ContainerStarted","Data":"312d37902ebc0707e1ceb80bce86f9b69a3bfb63169ac3a8bd79ae9b8002920f"} Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.605446 4702 generic.go:334] "Generic (PLEG): container finished" podID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerID="5b1f3446f2b17df3c21c4f1e77021594765bafddeec205c46bfbfdb4950e2e78" exitCode=0 Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.605537 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kd7rs" event={"ID":"62348515-75a4-4328-beb2-9e7df5e23fc3","Type":"ContainerDied","Data":"5b1f3446f2b17df3c21c4f1e77021594765bafddeec205c46bfbfdb4950e2e78"} Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.611018 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xr6cx\" (UniqueName: \"kubernetes.io/projected/86eb5301-b8dd-4784-81c9-56375cbe983d-kube-api-access-xr6cx\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.611230 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-utilities\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.611313 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-catalog-content\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.643045 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2x6vv" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.713017 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xr6cx\" (UniqueName: 
\"kubernetes.io/projected/86eb5301-b8dd-4784-81c9-56375cbe983d-kube-api-access-xr6cx\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.713135 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-utilities\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.713211 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-catalog-content\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.713700 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-catalog-content\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.713772 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-utilities\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.736154 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xr6cx\" (UniqueName: \"kubernetes.io/projected/86eb5301-b8dd-4784-81c9-56375cbe983d-kube-api-access-xr6cx\") pod \"certified-operators-mm67q\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.746755 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-bc4kb"] Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.749193 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.754311 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bc4kb"] Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.813863 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-l7d9j\" (UniqueName: \"kubernetes.io/projected/9144c34a-7330-4d8b-aaa7-34747a3f4773-kube-api-access-l7d9j\") pod \"community-operators-bc4kb\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.813963 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-catalog-content\") pod \"community-operators-bc4kb\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.814035 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-utilities\") pod \"community-operators-bc4kb\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.868187 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.916004 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-utilities\") pod \"community-operators-bc4kb\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.916125 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-l7d9j\" (UniqueName: \"kubernetes.io/projected/9144c34a-7330-4d8b-aaa7-34747a3f4773-kube-api-access-l7d9j\") pod \"community-operators-bc4kb\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.916214 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-catalog-content\") pod \"community-operators-bc4kb\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.916629 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-catalog-content\") pod \"community-operators-bc4kb\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.916861 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-utilities\") pod \"community-operators-bc4kb\" (UID: 
\"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:23 crc kubenswrapper[4702]: I1125 10:47:23.942275 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-l7d9j\" (UniqueName: \"kubernetes.io/projected/9144c34a-7330-4d8b-aaa7-34747a3f4773-kube-api-access-l7d9j\") pod \"community-operators-bc4kb\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.068301 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mm67q"] Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.078859 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.471048 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-bc4kb"] Nov 25 10:47:24 crc kubenswrapper[4702]: W1125 10:47:24.474484 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9144c34a_7330_4d8b_aaa7_34747a3f4773.slice/crio-c518dd51e4a58f27c5d11c86840b89a9e76d38dc1b0335f40747493cb9486a46 WatchSource:0}: Error finding container c518dd51e4a58f27c5d11c86840b89a9e76d38dc1b0335f40747493cb9486a46: Status 404 returned error can't find the container with id c518dd51e4a58f27c5d11c86840b89a9e76d38dc1b0335f40747493cb9486a46 Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.613379 4702 generic.go:334] "Generic (PLEG): container finished" podID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerID="312d37902ebc0707e1ceb80bce86f9b69a3bfb63169ac3a8bd79ae9b8002920f" exitCode=0 Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.613460 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhnmp" event={"ID":"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9","Type":"ContainerDied","Data":"312d37902ebc0707e1ceb80bce86f9b69a3bfb63169ac3a8bd79ae9b8002920f"} Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.614279 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bc4kb" event={"ID":"9144c34a-7330-4d8b-aaa7-34747a3f4773","Type":"ContainerStarted","Data":"c518dd51e4a58f27c5d11c86840b89a9e76d38dc1b0335f40747493cb9486a46"} Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.615084 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm67q" event={"ID":"86eb5301-b8dd-4784-81c9-56375cbe983d","Type":"ContainerStarted","Data":"147484e03d90c3228a9cddb87fc206875291fbe378e894b9ed1f5352390fc498"} Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.702222 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-p8jqc" Nov 25 10:47:24 crc kubenswrapper[4702]: I1125 10:47:24.759863 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-p8jqc" Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.271011 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-g5k2q" Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.271071 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-g5k2q" 
Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.311965 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-g5k2q" Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.636932 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ps98" event={"ID":"99bc87a4-5a14-4179-9e8b-4a49298b6f78","Type":"ContainerStarted","Data":"5d8c9d358baadd16d57d954ffdf99d33db9b61c38057a429d8a23d0557ad4793"} Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.638544 4702 generic.go:334] "Generic (PLEG): container finished" podID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerID="15a02248c7db9a98d1211202b96246d97b09aafaeee81f36f03320c259aeefcf" exitCode=0 Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.638594 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzfg8" event={"ID":"d3c5667b-d960-49fb-b84c-cc17236f96f3","Type":"ContainerDied","Data":"15a02248c7db9a98d1211202b96246d97b09aafaeee81f36f03320c259aeefcf"} Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.640543 4702 generic.go:334] "Generic (PLEG): container finished" podID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerID="e158bb1f510a144c4ea5bd46b430711bea5419ff4d7a1a99ee18ae5090af79f7" exitCode=0 Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.640603 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrrc" event={"ID":"57ac8391-54bc-457b-a6fe-8f4e761f53d1","Type":"ContainerDied","Data":"e158bb1f510a144c4ea5bd46b430711bea5419ff4d7a1a99ee18ae5090af79f7"} Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.642758 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggkwj" event={"ID":"bf090575-6d44-4e0b-9522-cb864bb8169b","Type":"ContainerStarted","Data":"5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd"} Nov 25 10:47:25 crc kubenswrapper[4702]: I1125 10:47:25.692316 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-g5k2q" Nov 25 10:47:26 crc kubenswrapper[4702]: I1125 10:47:26.649664 4702 generic.go:334] "Generic (PLEG): container finished" podID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerID="5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd" exitCode=0 Nov 25 10:47:26 crc kubenswrapper[4702]: I1125 10:47:26.649748 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggkwj" event={"ID":"bf090575-6d44-4e0b-9522-cb864bb8169b","Type":"ContainerDied","Data":"5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd"} Nov 25 10:47:26 crc kubenswrapper[4702]: I1125 10:47:26.651687 4702 generic.go:334] "Generic (PLEG): container finished" podID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerID="5d8c9d358baadd16d57d954ffdf99d33db9b61c38057a429d8a23d0557ad4793" exitCode=0 Nov 25 10:47:26 crc kubenswrapper[4702]: I1125 10:47:26.651737 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ps98" event={"ID":"99bc87a4-5a14-4179-9e8b-4a49298b6f78","Type":"ContainerDied","Data":"5d8c9d358baadd16d57d954ffdf99d33db9b61c38057a429d8a23d0557ad4793"} Nov 25 10:47:26 crc kubenswrapper[4702]: I1125 10:47:26.652944 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm67q" 
event={"ID":"86eb5301-b8dd-4784-81c9-56375cbe983d","Type":"ContainerStarted","Data":"7c98a06037a9bae3167a3dea7d71b455ead738e8a6dd90efc2f1df0d4e117d13"} Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.560532 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-gzp7g"] Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.562670 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.599189 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gzp7g"] Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.660672 4702 generic.go:334] "Generic (PLEG): container finished" podID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerID="1b7fd2b8ed57fd75c6f22dfd0c94e8c1fdf84eabf8a165b4689f0edbb43f1b42" exitCode=0 Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.660753 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" event={"ID":"a065282b-3c41-4b07-93b3-e29e2502e89a","Type":"ContainerDied","Data":"1b7fd2b8ed57fd75c6f22dfd0c94e8c1fdf84eabf8a165b4689f0edbb43f1b42"} Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.662840 4702 generic.go:334] "Generic (PLEG): container finished" podID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerID="6b80647459cfe754791ad602042a1fc5b6c2cf24f96ef0b16403cc9d94135398" exitCode=0 Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.662871 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bc4kb" event={"ID":"9144c34a-7330-4d8b-aaa7-34747a3f4773","Type":"ContainerDied","Data":"6b80647459cfe754791ad602042a1fc5b6c2cf24f96ef0b16403cc9d94135398"} Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.664226 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wzlrx\" (UniqueName: \"kubernetes.io/projected/2980b36d-ef86-443d-9c30-b38cdf91e95b-kube-api-access-wzlrx\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.664310 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-utilities\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.664335 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-catalog-content\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.665231 4702 generic.go:334] "Generic (PLEG): container finished" podID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerID="7c98a06037a9bae3167a3dea7d71b455ead738e8a6dd90efc2f1df0d4e117d13" exitCode=0 Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.665588 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm67q" 
event={"ID":"86eb5301-b8dd-4784-81c9-56375cbe983d","Type":"ContainerDied","Data":"7c98a06037a9bae3167a3dea7d71b455ead738e8a6dd90efc2f1df0d4e117d13"} Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.765154 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wzlrx\" (UniqueName: \"kubernetes.io/projected/2980b36d-ef86-443d-9c30-b38cdf91e95b-kube-api-access-wzlrx\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.765378 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-utilities\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.765961 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-utilities\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.766020 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-catalog-content\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.766363 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-catalog-content\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.783946 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wzlrx\" (UniqueName: \"kubernetes.io/projected/2980b36d-ef86-443d-9c30-b38cdf91e95b-kube-api-access-wzlrx\") pod \"redhat-operators-gzp7g\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:27 crc kubenswrapper[4702]: I1125 10:47:27.879303 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.143596 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-t6m9v"] Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.145158 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.152606 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t6m9v"] Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.292084 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xwk4q\" (UniqueName: \"kubernetes.io/projected/2db4d43f-926d-4fba-84d8-e49c594c5026-kube-api-access-xwk4q\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.292424 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-utilities\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.292660 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-catalog-content\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.393740 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-utilities\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.393798 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-catalog-content\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.393861 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xwk4q\" (UniqueName: \"kubernetes.io/projected/2db4d43f-926d-4fba-84d8-e49c594c5026-kube-api-access-xwk4q\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.394353 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-catalog-content\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.394498 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-utilities\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.421926 4702 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-xwk4q\" (UniqueName: \"kubernetes.io/projected/2db4d43f-926d-4fba-84d8-e49c594c5026-kube-api-access-xwk4q\") pod \"redhat-marketplace-t6m9v\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.465179 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.518629 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bm7kf" Nov 25 10:47:28 crc kubenswrapper[4702]: I1125 10:47:28.555279 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bm7kf" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.545240 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-gc8zv"] Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.546840 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.568731 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gc8zv"] Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.650886 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5vgc7\" (UniqueName: \"kubernetes.io/projected/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-kube-api-access-5vgc7\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.651049 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-catalog-content\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.651085 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-utilities\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.752308 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-catalog-content\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.752691 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-utilities\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.752791 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-catalog-content\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.752801 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5vgc7\" (UniqueName: \"kubernetes.io/projected/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-kube-api-access-5vgc7\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.753385 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-utilities\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.771734 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5vgc7\" (UniqueName: \"kubernetes.io/projected/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-kube-api-access-5vgc7\") pod \"community-operators-gc8zv\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:32 crc kubenswrapper[4702]: I1125 10:47:32.864863 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.147476 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-zs89l"] Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.149134 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.155837 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zs89l"] Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.259144 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-catalog-content\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.259230 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-utilities\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.259324 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xdg4f\" (UniqueName: \"kubernetes.io/projected/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-kube-api-access-xdg4f\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.360338 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xdg4f\" (UniqueName: \"kubernetes.io/projected/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-kube-api-access-xdg4f\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.360444 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-catalog-content\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.360482 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-utilities\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.361026 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-utilities\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.361117 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-catalog-content\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.377201 4702 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-xdg4f\" (UniqueName: \"kubernetes.io/projected/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-kube-api-access-xdg4f\") pod \"certified-operators-zs89l\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:33 crc kubenswrapper[4702]: I1125 10:47:33.517817 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:47:34 crc kubenswrapper[4702]: I1125 10:47:34.296820 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-t6m9v"] Nov 25 10:47:34 crc kubenswrapper[4702]: I1125 10:47:34.446108 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-gzp7g"] Nov 25 10:47:34 crc kubenswrapper[4702]: W1125 10:47:34.540725 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2db4d43f_926d_4fba_84d8_e49c594c5026.slice/crio-842edda2ff09462696822edf78465b5a163d6af146ee4c0c896577e9223ec735 WatchSource:0}: Error finding container 842edda2ff09462696822edf78465b5a163d6af146ee4c0c896577e9223ec735: Status 404 returned error can't find the container with id 842edda2ff09462696822edf78465b5a163d6af146ee4c0c896577e9223ec735 Nov 25 10:47:34 crc kubenswrapper[4702]: W1125 10:47:34.588526 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2980b36d_ef86_443d_9c30_b38cdf91e95b.slice/crio-917483e3bb236d6587f4b0e50db9efda2f253cfe8730a3cd2e3d482f944640bc WatchSource:0}: Error finding container 917483e3bb236d6587f4b0e50db9efda2f253cfe8730a3cd2e3d482f944640bc: Status 404 returned error can't find the container with id 917483e3bb236d6587f4b0e50db9efda2f253cfe8730a3cd2e3d482f944640bc Nov 25 10:47:34 crc kubenswrapper[4702]: I1125 10:47:34.725188 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t6m9v" event={"ID":"2db4d43f-926d-4fba-84d8-e49c594c5026","Type":"ContainerStarted","Data":"842edda2ff09462696822edf78465b5a163d6af146ee4c0c896577e9223ec735"} Nov 25 10:47:34 crc kubenswrapper[4702]: I1125 10:47:34.740006 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzp7g" event={"ID":"2980b36d-ef86-443d-9c30-b38cdf91e95b","Type":"ContainerStarted","Data":"917483e3bb236d6587f4b0e50db9efda2f253cfe8730a3cd2e3d482f944640bc"} Nov 25 10:47:34 crc kubenswrapper[4702]: I1125 10:47:34.847982 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-gc8zv"] Nov 25 10:47:34 crc kubenswrapper[4702]: W1125 10:47:34.956196 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a183e71_4cd9_4ec1_8ec4_3eff9a0b7f62.slice/crio-eb8c1b37a991db0e80e8ee835c374dc4242d62182087166b699b9f3ff573e530 WatchSource:0}: Error finding container eb8c1b37a991db0e80e8ee835c374dc4242d62182087166b699b9f3ff573e530: Status 404 returned error can't find the container with id eb8c1b37a991db0e80e8ee835c374dc4242d62182087166b699b9f3ff573e530 Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.165155 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-zs89l"] Nov 25 10:47:35 crc kubenswrapper[4702]: W1125 10:47:35.256269 4702 manager.go:1169] Failed to process 
watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd4c7f2d3_ab7f_410e_9af3_b59ff87c9bc4.slice/crio-bc56638e36d64076c03f0bdffd8e503b7930775b90ff4d3c7e51fd19ff0cd385 WatchSource:0}: Error finding container bc56638e36d64076c03f0bdffd8e503b7930775b90ff4d3c7e51fd19ff0cd385: Status 404 returned error can't find the container with id bc56638e36d64076c03f0bdffd8e503b7930775b90ff4d3c7e51fd19ff0cd385 Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.762152 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlfr2" event={"ID":"01585b85-8743-46df-bf57-28b9c7101515","Type":"ContainerStarted","Data":"82351b6db28029ffcee108fe757f14e4156eca8b9d3a49945f316fda2ae2f586"} Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.779352 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zs89l" event={"ID":"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4","Type":"ContainerStarted","Data":"bc56638e36d64076c03f0bdffd8e503b7930775b90ff4d3c7e51fd19ff0cd385"} Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.787425 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-hlfr2" podStartSLOduration=5.868443985 podStartE2EDuration="27.787387259s" podCreationTimestamp="2025-11-25 10:47:08 +0000 UTC" firstStartedPulling="2025-11-25 10:47:12.356039284 +0000 UTC m=+929.722634963" lastFinishedPulling="2025-11-25 10:47:34.274982548 +0000 UTC m=+951.641578237" observedRunningTime="2025-11-25 10:47:35.786269006 +0000 UTC m=+953.152864715" watchObservedRunningTime="2025-11-25 10:47:35.787387259 +0000 UTC m=+953.153982948" Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.797091 4702 generic.go:334] "Generic (PLEG): container finished" podID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerID="c046361a896970752f03bd584865bcd21aa43b87e29a639b6ba236de0da97b67" exitCode=0 Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.797195 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t6m9v" event={"ID":"2db4d43f-926d-4fba-84d8-e49c594c5026","Type":"ContainerDied","Data":"c046361a896970752f03bd584865bcd21aa43b87e29a639b6ba236de0da97b67"} Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.820864 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc8zv" event={"ID":"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62","Type":"ContainerStarted","Data":"eb8c1b37a991db0e80e8ee835c374dc4242d62182087166b699b9f3ff573e530"} Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.838652 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dxxh" event={"ID":"615b8e09-5a50-4af4-89dd-31fb6282baea","Type":"ContainerStarted","Data":"7ffa06af9e254343a7e2b52dc964d06ce8cf159a6678e9676dcf87583f439450"} Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.841956 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8rxw" event={"ID":"e64ea1d5-d260-4331-bc5d-800fd8248ff7","Type":"ContainerStarted","Data":"4eb8d937e6f1dcba11a8c30d56006efeb607742c4550b50c45d3e74c89e2aaae"} Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.896314 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-x8rxw" podStartSLOduration=7.142325032 podStartE2EDuration="24.896294866s" podCreationTimestamp="2025-11-25 
10:47:11 +0000 UTC" firstStartedPulling="2025-11-25 10:47:14.400443722 +0000 UTC m=+931.767039411" lastFinishedPulling="2025-11-25 10:47:32.154413556 +0000 UTC m=+949.521009245" observedRunningTime="2025-11-25 10:47:35.895526063 +0000 UTC m=+953.262121762" watchObservedRunningTime="2025-11-25 10:47:35.896294866 +0000 UTC m=+953.262890555" Nov 25 10:47:35 crc kubenswrapper[4702]: I1125 10:47:35.899289 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-2dxxh" podStartSLOduration=5.382253017 podStartE2EDuration="24.899279243s" podCreationTimestamp="2025-11-25 10:47:11 +0000 UTC" firstStartedPulling="2025-11-25 10:47:14.389161653 +0000 UTC m=+931.755757342" lastFinishedPulling="2025-11-25 10:47:33.906187879 +0000 UTC m=+951.272783568" observedRunningTime="2025-11-25 10:47:35.874992935 +0000 UTC m=+953.241588634" watchObservedRunningTime="2025-11-25 10:47:35.899279243 +0000 UTC m=+953.265874932" Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.852617 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ps98" event={"ID":"99bc87a4-5a14-4179-9e8b-4a49298b6f78","Type":"ContainerStarted","Data":"142792dc155f780e17c5b4da679b3a01eee8a486ce94b58ae31734680c3db2c5"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.854811 4702 generic.go:334] "Generic (PLEG): container finished" podID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerID="bf8c5072c415fd49b509745f326edc235cec479d70a76e668101010ed8d05023" exitCode=0 Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.854914 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzp7g" event={"ID":"2980b36d-ef86-443d-9c30-b38cdf91e95b","Type":"ContainerDied","Data":"bf8c5072c415fd49b509745f326edc235cec479d70a76e668101010ed8d05023"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.857028 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrrc" event={"ID":"57ac8391-54bc-457b-a6fe-8f4e761f53d1","Type":"ContainerStarted","Data":"f114c1169e82ab1054c255f8010724a1df98d58cb5ec6af5dfd4fba539246375"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.859190 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhnmp" event={"ID":"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9","Type":"ContainerStarted","Data":"8d45ea01eb10d4ca40510cd12c7cd02e0492c1c790bbf2485ccc41373e03f7cc"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.861982 4702 generic.go:334] "Generic (PLEG): container finished" podID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerID="e4f00f0f0edd4db95dbac20fdf4690b470be79299126a544101195f3edc2ca94" exitCode=0 Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.862053 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bc4kb" event={"ID":"9144c34a-7330-4d8b-aaa7-34747a3f4773","Type":"ContainerDied","Data":"e4f00f0f0edd4db95dbac20fdf4690b470be79299126a544101195f3edc2ca94"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.867998 4702 generic.go:334] "Generic (PLEG): container finished" podID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerID="48c0a46061ed4552d4299319479d17c121c2124efa5c49730c68bcefc3c3d155" exitCode=0 Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.868058 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzfg8" 
event={"ID":"d3c5667b-d960-49fb-b84c-cc17236f96f3","Type":"ContainerDied","Data":"48c0a46061ed4552d4299319479d17c121c2124efa5c49730c68bcefc3c3d155"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.871911 4702 generic.go:334] "Generic (PLEG): container finished" podID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerID="7a60e0f96c7e51154e76c9f2c41558246ce368ce1fad09704b0bcfed01d1aa88" exitCode=0 Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.871957 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zs89l" event={"ID":"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4","Type":"ContainerDied","Data":"7a60e0f96c7e51154e76c9f2c41558246ce368ce1fad09704b0bcfed01d1aa88"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.876372 4702 generic.go:334] "Generic (PLEG): container finished" podID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerID="d6496600281a45a9eea7d24cec4e78e82df67b649d0dac54ff665d8d396cbb97" exitCode=0 Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.876410 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" event={"ID":"a065282b-3c41-4b07-93b3-e29e2502e89a","Type":"ContainerDied","Data":"d6496600281a45a9eea7d24cec4e78e82df67b649d0dac54ff665d8d396cbb97"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.882754 4702 generic.go:334] "Generic (PLEG): container finished" podID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerID="42c5968f115a69d31b405bea0397d7b65ed69d2ea54799aa950c09c8f3c12177" exitCode=0 Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.882872 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc8zv" event={"ID":"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62","Type":"ContainerDied","Data":"42c5968f115a69d31b405bea0397d7b65ed69d2ea54799aa950c09c8f3c12177"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.893978 4702 generic.go:334] "Generic (PLEG): container finished" podID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerID="ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a" exitCode=0 Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.894038 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggkwj" event={"ID":"bf090575-6d44-4e0b-9522-cb864bb8169b","Type":"ContainerDied","Data":"ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.897435 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm67q" event={"ID":"86eb5301-b8dd-4784-81c9-56375cbe983d","Type":"ContainerStarted","Data":"c856ff0ed01792523c5a4ba038f14795e0026e044221a1f7a6cb45c13a0ea0dd"} Nov 25 10:47:36 crc kubenswrapper[4702]: I1125 10:47:36.905479 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kd7rs" event={"ID":"62348515-75a4-4328-beb2-9e7df5e23fc3","Type":"ContainerStarted","Data":"3af6bc66bf0ee16d5141b4559355be0f0a4dede7cc13d96bd3995fd1aee9ca27"} Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.755738 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-tnqkn"] Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.757336 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.776229 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnqkn"] Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.837967 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-catalog-content\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.838029 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-utilities\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.838091 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mls7d\" (UniqueName: \"kubernetes.io/projected/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-kube-api-access-mls7d\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.914659 4702 generic.go:334] "Generic (PLEG): container finished" podID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerID="8d45ea01eb10d4ca40510cd12c7cd02e0492c1c790bbf2485ccc41373e03f7cc" exitCode=0 Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.914743 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhnmp" event={"ID":"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9","Type":"ContainerDied","Data":"8d45ea01eb10d4ca40510cd12c7cd02e0492c1c790bbf2485ccc41373e03f7cc"} Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.917459 4702 generic.go:334] "Generic (PLEG): container finished" podID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerID="c856ff0ed01792523c5a4ba038f14795e0026e044221a1f7a6cb45c13a0ea0dd" exitCode=0 Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.917540 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm67q" event={"ID":"86eb5301-b8dd-4784-81c9-56375cbe983d","Type":"ContainerDied","Data":"c856ff0ed01792523c5a4ba038f14795e0026e044221a1f7a6cb45c13a0ea0dd"} Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.922145 4702 generic.go:334] "Generic (PLEG): container finished" podID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerID="3af6bc66bf0ee16d5141b4559355be0f0a4dede7cc13d96bd3995fd1aee9ca27" exitCode=0 Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.922212 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kd7rs" event={"ID":"62348515-75a4-4328-beb2-9e7df5e23fc3","Type":"ContainerDied","Data":"3af6bc66bf0ee16d5141b4559355be0f0a4dede7cc13d96bd3995fd1aee9ca27"} Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.926381 4702 generic.go:334] "Generic (PLEG): container finished" podID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerID="46d2e1826050e681166045d108c5aa17d6f5ae06fd6e1a3dbf4c70ef0ac7e174" exitCode=0 Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.926435 4702 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t6m9v" event={"ID":"2db4d43f-926d-4fba-84d8-e49c594c5026","Type":"ContainerDied","Data":"46d2e1826050e681166045d108c5aa17d6f5ae06fd6e1a3dbf4c70ef0ac7e174"} Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.934896 4702 generic.go:334] "Generic (PLEG): container finished" podID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerID="142792dc155f780e17c5b4da679b3a01eee8a486ce94b58ae31734680c3db2c5" exitCode=0 Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.935068 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ps98" event={"ID":"99bc87a4-5a14-4179-9e8b-4a49298b6f78","Type":"ContainerDied","Data":"142792dc155f780e17c5b4da679b3a01eee8a486ce94b58ae31734680c3db2c5"} Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.939456 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-utilities\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.939560 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mls7d\" (UniqueName: \"kubernetes.io/projected/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-kube-api-access-mls7d\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.939632 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-catalog-content\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.939981 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-utilities\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.940089 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-catalog-content\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.946595 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" event={"ID":"a065282b-3c41-4b07-93b3-e29e2502e89a","Type":"ContainerStarted","Data":"69140880a059b9b0997edcf2c7db3d6d74473a42d6d9ac04b96b2106eddb787f"} Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.949059 4702 generic.go:334] "Generic (PLEG): container finished" podID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerID="f114c1169e82ab1054c255f8010724a1df98d58cb5ec6af5dfd4fba539246375" exitCode=0 Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.949091 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrrc" 
event={"ID":"57ac8391-54bc-457b-a6fe-8f4e761f53d1","Type":"ContainerDied","Data":"f114c1169e82ab1054c255f8010724a1df98d58cb5ec6af5dfd4fba539246375"} Nov 25 10:47:37 crc kubenswrapper[4702]: I1125 10:47:37.986688 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mls7d\" (UniqueName: \"kubernetes.io/projected/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-kube-api-access-mls7d\") pod \"redhat-operators-tnqkn\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.029506 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" podStartSLOduration=16.096856267 podStartE2EDuration="22.029484786s" podCreationTimestamp="2025-11-25 10:47:16 +0000 UTC" firstStartedPulling="2025-11-25 10:47:29.803591557 +0000 UTC m=+947.170187246" lastFinishedPulling="2025-11-25 10:47:35.736220076 +0000 UTC m=+953.102815765" observedRunningTime="2025-11-25 10:47:38.029327481 +0000 UTC m=+955.395923180" watchObservedRunningTime="2025-11-25 10:47:38.029484786 +0000 UTC m=+955.396080475" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.072405 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.321718 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-tnqkn"] Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.552770 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7w9lw"] Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.555117 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.563266 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w9lw"] Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.651498 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-catalog-content\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.651888 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tcg6r\" (UniqueName: \"kubernetes.io/projected/d93c64ec-5a95-4cef-a289-9fee39d7466f-kube-api-access-tcg6r\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.652041 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-utilities\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.666741 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-hlfr2" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.666777 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-hlfr2" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.753514 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tcg6r\" (UniqueName: \"kubernetes.io/projected/d93c64ec-5a95-4cef-a289-9fee39d7466f-kube-api-access-tcg6r\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.753637 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-utilities\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.753726 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-catalog-content\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.754297 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-utilities\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.754568 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-catalog-content\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.779266 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tcg6r\" (UniqueName: \"kubernetes.io/projected/d93c64ec-5a95-4cef-a289-9fee39d7466f-kube-api-access-tcg6r\") pod \"redhat-marketplace-7w9lw\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.970425 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:47:38 crc kubenswrapper[4702]: I1125 10:47:38.994792 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnqkn" event={"ID":"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b","Type":"ContainerStarted","Data":"88fbaa4787ab3cd6bd9fbd964e31c941c5a428bd9d6377d08c5191c39b9d8d1d"} Nov 25 10:47:39 crc kubenswrapper[4702]: I1125 10:47:39.203146 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w9lw"] Nov 25 10:47:39 crc kubenswrapper[4702]: I1125 10:47:39.710462 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-hlfr2" podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="registry-server" probeResult="failure" output=< Nov 25 10:47:39 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s Nov 25 10:47:39 crc kubenswrapper[4702]: > Nov 25 10:47:40 crc kubenswrapper[4702]: I1125 10:47:40.012960 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w9lw" event={"ID":"d93c64ec-5a95-4cef-a289-9fee39d7466f","Type":"ContainerStarted","Data":"82122dd77526fef9035b0c8d136e2b837e960f410d5b72dd94297a6249b6bb55"} Nov 25 10:47:40 crc kubenswrapper[4702]: I1125 10:47:40.013271 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w9lw" event={"ID":"d93c64ec-5a95-4cef-a289-9fee39d7466f","Type":"ContainerStarted","Data":"6dedcd705d28953e8e9e4e511da93379804c74772b18e382b69b2cf0fb9b2558"} Nov 25 10:47:40 crc kubenswrapper[4702]: I1125 10:47:40.016362 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnqkn" event={"ID":"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b","Type":"ContainerStarted","Data":"ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.029705 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhnmp" event={"ID":"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9","Type":"ContainerStarted","Data":"573c653b15aecabbbd2f7a3eabf10af2afdee556722d39c25b635acb2f0df0f1"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.033998 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggkwj" event={"ID":"bf090575-6d44-4e0b-9522-cb864bb8169b","Type":"ContainerStarted","Data":"7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.037129 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm67q" 
event={"ID":"86eb5301-b8dd-4784-81c9-56375cbe983d","Type":"ContainerStarted","Data":"7af4d97cc51f1fa783f8f8f93574e232b6a510d873e307c657b4efc7abc51c53"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.039544 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzfg8" event={"ID":"d3c5667b-d960-49fb-b84c-cc17236f96f3","Type":"ContainerStarted","Data":"9e580642f7e0e972101e0bd568cff0139ad368010c869c8f6dd8f70b5cf2e7d0"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.042637 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zs89l" event={"ID":"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4","Type":"ContainerStarted","Data":"c858ee7e4750538a14289f8508294af65523561215da9da608360d5f817c5ab0"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.045501 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bc4kb" event={"ID":"9144c34a-7330-4d8b-aaa7-34747a3f4773","Type":"ContainerStarted","Data":"16722b785a2bd0614ab287bc2171c23bfd494dcb8c89cdb4f1035e29d3f9e099"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.050694 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kd7rs" event={"ID":"62348515-75a4-4328-beb2-9e7df5e23fc3","Type":"ContainerStarted","Data":"519963194f3114e18fcc237a4e4105d8409a40d83cce36f15a20e0b5fa5f402e"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.052975 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t6m9v" event={"ID":"2db4d43f-926d-4fba-84d8-e49c594c5026","Type":"ContainerStarted","Data":"180c73a750d26020495abd6139e9e036a1ecb8b9b101a83b25607c3f17a46d08"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.055884 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ps98" event={"ID":"99bc87a4-5a14-4179-9e8b-4a49298b6f78","Type":"ContainerStarted","Data":"17eddc39edc726a885e63fe2a81872545ebe140e270e7648c90579fc6878c8d0"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.058360 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc8zv" event={"ID":"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62","Type":"ContainerStarted","Data":"286892a795e8026ff2e6bb1fd2e1cd45b0eff5275fc2b779335d0adb4991e590"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.060657 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzp7g" event={"ID":"2980b36d-ef86-443d-9c30-b38cdf91e95b","Type":"ContainerStarted","Data":"a4534a8d21211eed9be3946742c09b58c094f230f2780333ba75f9d7ab30b45f"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.066734 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrrc" event={"ID":"57ac8391-54bc-457b-a6fe-8f4e761f53d1","Type":"ContainerStarted","Data":"6f606f4f27c6104c8d96f6a3a79c56cecd43939d839a7d854ae41a1538940b0e"} Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.084220 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-bhnmp" podStartSLOduration=12.485639343999999 podStartE2EDuration="23.084203319s" podCreationTimestamp="2025-11-25 10:47:18 +0000 UTC" firstStartedPulling="2025-11-25 10:47:29.803959608 +0000 UTC m=+947.170555297" lastFinishedPulling="2025-11-25 10:47:40.402523583 +0000 UTC m=+957.769119272" 
observedRunningTime="2025-11-25 10:47:41.056531512 +0000 UTC m=+958.423127211" watchObservedRunningTime="2025-11-25 10:47:41.084203319 +0000 UTC m=+958.450799008" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.084382 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-t6m9v" podStartSLOduration=8.291524375 podStartE2EDuration="13.084378284s" podCreationTimestamp="2025-11-25 10:47:28 +0000 UTC" firstStartedPulling="2025-11-25 10:47:35.814004665 +0000 UTC m=+953.180600354" lastFinishedPulling="2025-11-25 10:47:40.606858574 +0000 UTC m=+957.973454263" observedRunningTime="2025-11-25 10:47:41.08012266 +0000 UTC m=+958.446718369" watchObservedRunningTime="2025-11-25 10:47:41.084378284 +0000 UTC m=+958.450973973" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.170251 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-bc4kb" podStartSLOduration=8.622469289 podStartE2EDuration="18.170235969s" podCreationTimestamp="2025-11-25 10:47:23 +0000 UTC" firstStartedPulling="2025-11-25 10:47:31.113858901 +0000 UTC m=+948.480454590" lastFinishedPulling="2025-11-25 10:47:40.661625581 +0000 UTC m=+958.028221270" observedRunningTime="2025-11-25 10:47:41.166478999 +0000 UTC m=+958.533074688" watchObservedRunningTime="2025-11-25 10:47:41.170235969 +0000 UTC m=+958.536831648" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.171663 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mm67q" podStartSLOduration=7.512092865 podStartE2EDuration="18.17165725s" podCreationTimestamp="2025-11-25 10:47:23 +0000 UTC" firstStartedPulling="2025-11-25 10:47:29.804198174 +0000 UTC m=+947.170793873" lastFinishedPulling="2025-11-25 10:47:40.463762569 +0000 UTC m=+957.830358258" observedRunningTime="2025-11-25 10:47:41.143816738 +0000 UTC m=+958.510412417" watchObservedRunningTime="2025-11-25 10:47:41.17165725 +0000 UTC m=+958.538253039" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.195138 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ggkwj" podStartSLOduration=9.895921863 podStartE2EDuration="20.195120455s" podCreationTimestamp="2025-11-25 10:47:21 +0000 UTC" firstStartedPulling="2025-11-25 10:47:29.803939967 +0000 UTC m=+947.170535656" lastFinishedPulling="2025-11-25 10:47:40.103138559 +0000 UTC m=+957.469734248" observedRunningTime="2025-11-25 10:47:41.190130499 +0000 UTC m=+958.556726208" watchObservedRunningTime="2025-11-25 10:47:41.195120455 +0000 UTC m=+958.561716144" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.245722 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-kd7rs" podStartSLOduration=12.143438722 podStartE2EDuration="23.24570397s" podCreationTimestamp="2025-11-25 10:47:18 +0000 UTC" firstStartedPulling="2025-11-25 10:47:28.915757607 +0000 UTC m=+946.282353296" lastFinishedPulling="2025-11-25 10:47:40.018022855 +0000 UTC m=+957.384618544" observedRunningTime="2025-11-25 10:47:41.242397974 +0000 UTC m=+958.608993663" watchObservedRunningTime="2025-11-25 10:47:41.24570397 +0000 UTC m=+958.612299659" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.249208 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wzfg8" podStartSLOduration=13.876223824 
podStartE2EDuration="26.249197562s" podCreationTimestamp="2025-11-25 10:47:15 +0000 UTC" firstStartedPulling="2025-11-25 10:47:27.509840873 +0000 UTC m=+944.876436572" lastFinishedPulling="2025-11-25 10:47:39.882814611 +0000 UTC m=+957.249410310" observedRunningTime="2025-11-25 10:47:41.217849298 +0000 UTC m=+958.584444997" watchObservedRunningTime="2025-11-25 10:47:41.249197562 +0000 UTC m=+958.615793251" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.302072 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-9ps98" podStartSLOduration=9.434840902 podStartE2EDuration="20.302056804s" podCreationTimestamp="2025-11-25 10:47:21 +0000 UTC" firstStartedPulling="2025-11-25 10:47:29.803566316 +0000 UTC m=+947.170162005" lastFinishedPulling="2025-11-25 10:47:40.670782218 +0000 UTC m=+958.037377907" observedRunningTime="2025-11-25 10:47:41.28065185 +0000 UTC m=+958.647247539" watchObservedRunningTime="2025-11-25 10:47:41.302056804 +0000 UTC m=+958.668652503" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.345041 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-wrrrc" podStartSLOduration=15.835038867 podStartE2EDuration="26.345020908s" podCreationTimestamp="2025-11-25 10:47:15 +0000 UTC" firstStartedPulling="2025-11-25 10:47:29.803571046 +0000 UTC m=+947.170166735" lastFinishedPulling="2025-11-25 10:47:40.313553087 +0000 UTC m=+957.680148776" observedRunningTime="2025-11-25 10:47:41.342536745 +0000 UTC m=+958.709132434" watchObservedRunningTime="2025-11-25 10:47:41.345020908 +0000 UTC m=+958.711616597" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.476186 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.476239 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.672609 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:41 crc kubenswrapper[4702]: I1125 10:47:41.672972 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:42 crc kubenswrapper[4702]: I1125 10:47:42.070195 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-x8rxw" Nov 25 10:47:42 crc kubenswrapper[4702]: I1125 10:47:42.070240 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-x8rxw" Nov 25 10:47:42 crc kubenswrapper[4702]: I1125 10:47:42.119628 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-x8rxw" Nov 25 10:47:42 crc kubenswrapper[4702]: I1125 10:47:42.267117 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:47:42 crc kubenswrapper[4702]: I1125 10:47:42.267202 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:47:42 crc kubenswrapper[4702]: I1125 10:47:42.348261 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-2dxxh" 
Nov 25 10:47:42 crc kubenswrapper[4702]: I1125 10:47:42.525819 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-ggkwj" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="registry-server" probeResult="failure" output=< Nov 25 10:47:42 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s Nov 25 10:47:42 crc kubenswrapper[4702]: > Nov 25 10:47:42 crc kubenswrapper[4702]: I1125 10:47:42.722271 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-9ps98" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="registry-server" probeResult="failure" output=< Nov 25 10:47:42 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s Nov 25 10:47:42 crc kubenswrapper[4702]: > Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.080670 4702 generic.go:334] "Generic (PLEG): container finished" podID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerID="82122dd77526fef9035b0c8d136e2b837e960f410d5b72dd94297a6249b6bb55" exitCode=0 Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.080735 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w9lw" event={"ID":"d93c64ec-5a95-4cef-a289-9fee39d7466f","Type":"ContainerDied","Data":"82122dd77526fef9035b0c8d136e2b837e960f410d5b72dd94297a6249b6bb55"} Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.082405 4702 generic.go:334] "Generic (PLEG): container finished" podID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerID="ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94" exitCode=0 Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.082476 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnqkn" event={"ID":"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b","Type":"ContainerDied","Data":"ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94"} Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.136416 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-x8rxw" Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.143568 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.590978 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.591048 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.869247 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.869290 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:43 crc kubenswrapper[4702]: I1125 10:47:43.910184 
4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.080117 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.080633 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.094047 4702 generic.go:334] "Generic (PLEG): container finished" podID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerID="286892a795e8026ff2e6bb1fd2e1cd45b0eff5275fc2b779335d0adb4991e590" exitCode=0 Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.094124 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc8zv" event={"ID":"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62","Type":"ContainerDied","Data":"286892a795e8026ff2e6bb1fd2e1cd45b0eff5275fc2b779335d0adb4991e590"} Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.100529 4702 generic.go:334] "Generic (PLEG): container finished" podID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerID="c858ee7e4750538a14289f8508294af65523561215da9da608360d5f817c5ab0" exitCode=0 Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.100625 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zs89l" event={"ID":"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4","Type":"ContainerDied","Data":"c858ee7e4750538a14289f8508294af65523561215da9da608360d5f817c5ab0"} Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.110890 4702 generic.go:334] "Generic (PLEG): container finished" podID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerID="69140880a059b9b0997edcf2c7db3d6d74473a42d6d9ac04b96b2106eddb787f" exitCode=0 Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.111097 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" event={"ID":"a065282b-3c41-4b07-93b3-e29e2502e89a","Type":"ContainerDied","Data":"69140880a059b9b0997edcf2c7db3d6d74473a42d6d9ac04b96b2106eddb787f"} Nov 25 10:47:44 crc kubenswrapper[4702]: I1125 10:47:44.124663 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.399087 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.503189 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-bundle\") pod \"a065282b-3c41-4b07-93b3-e29e2502e89a\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.503350 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-util\") pod \"a065282b-3c41-4b07-93b3-e29e2502e89a\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.503373 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4cp2t\" (UniqueName: \"kubernetes.io/projected/a065282b-3c41-4b07-93b3-e29e2502e89a-kube-api-access-4cp2t\") pod \"a065282b-3c41-4b07-93b3-e29e2502e89a\" (UID: \"a065282b-3c41-4b07-93b3-e29e2502e89a\") " Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.513273 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a065282b-3c41-4b07-93b3-e29e2502e89a-kube-api-access-4cp2t" (OuterVolumeSpecName: "kube-api-access-4cp2t") pod "a065282b-3c41-4b07-93b3-e29e2502e89a" (UID: "a065282b-3c41-4b07-93b3-e29e2502e89a"). InnerVolumeSpecName "kube-api-access-4cp2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.519454 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-util" (OuterVolumeSpecName: "util") pod "a065282b-3c41-4b07-93b3-e29e2502e89a" (UID: "a065282b-3c41-4b07-93b3-e29e2502e89a"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.604743 4702 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-util\") on node \"crc\" DevicePath \"\"" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.604782 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4cp2t\" (UniqueName: \"kubernetes.io/projected/a065282b-3c41-4b07-93b3-e29e2502e89a-kube-api-access-4cp2t\") on node \"crc\" DevicePath \"\"" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.877024 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.877099 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.934419 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.969045 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:45 crc kubenswrapper[4702]: I1125 10:47:45.969100 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:46 crc kubenswrapper[4702]: I1125 10:47:46.127331 4702 generic.go:334] "Generic (PLEG): container finished" podID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerID="a4534a8d21211eed9be3946742c09b58c094f230f2780333ba75f9d7ab30b45f" exitCode=0 Nov 25 10:47:46 crc kubenswrapper[4702]: I1125 10:47:46.127704 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzp7g" event={"ID":"2980b36d-ef86-443d-9c30-b38cdf91e95b","Type":"ContainerDied","Data":"a4534a8d21211eed9be3946742c09b58c094f230f2780333ba75f9d7ab30b45f"} Nov 25 10:47:46 crc kubenswrapper[4702]: I1125 10:47:46.131255 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" event={"ID":"a065282b-3c41-4b07-93b3-e29e2502e89a","Type":"ContainerDied","Data":"15c0794b06b744917e28e5b7702630378cf9cdbbe610cb37125da39cb308c4e3"} Nov 25 10:47:46 crc kubenswrapper[4702]: I1125 10:47:46.131405 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="15c0794b06b744917e28e5b7702630378cf9cdbbe610cb37125da39cb308c4e3" Nov 25 10:47:46 crc kubenswrapper[4702]: I1125 10:47:46.131638 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq" Nov 25 10:47:46 crc kubenswrapper[4702]: I1125 10:47:46.182311 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:47:46 crc kubenswrapper[4702]: I1125 10:47:46.664709 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-bundle" (OuterVolumeSpecName: "bundle") pod "a065282b-3c41-4b07-93b3-e29e2502e89a" (UID: "a065282b-3c41-4b07-93b3-e29e2502e89a"). InnerVolumeSpecName "bundle". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:47:46 crc kubenswrapper[4702]: I1125 10:47:46.737747 4702 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/a065282b-3c41-4b07-93b3-e29e2502e89a-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:47:47 crc kubenswrapper[4702]: I1125 10:47:47.012248 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-wrrrc" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="registry-server" probeResult="failure" output=< Nov 25 10:47:47 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s Nov 25 10:47:47 crc kubenswrapper[4702]: > Nov 25 10:47:48 crc kubenswrapper[4702]: I1125 10:47:48.466548 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:48 crc kubenswrapper[4702]: I1125 10:47:48.467485 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:48 crc kubenswrapper[4702]: I1125 10:47:48.517827 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:48 crc kubenswrapper[4702]: I1125 10:47:48.707422 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-hlfr2" Nov 25 10:47:48 crc kubenswrapper[4702]: I1125 10:47:48.771526 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-hlfr2" Nov 25 10:47:49 crc kubenswrapper[4702]: I1125 10:47:49.068507 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:49 crc kubenswrapper[4702]: I1125 10:47:49.068559 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:49 crc kubenswrapper[4702]: I1125 10:47:49.108167 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:49 crc kubenswrapper[4702]: I1125 10:47:49.185194 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:47:49 crc kubenswrapper[4702]: I1125 10:47:49.191375 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:47:49 crc kubenswrapper[4702]: I1125 10:47:49.422167 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:49 crc kubenswrapper[4702]: I1125 10:47:49.423347 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:49 crc kubenswrapper[4702]: I1125 10:47:49.469322 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:50 crc kubenswrapper[4702]: I1125 10:47:50.207092 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:47:51 crc kubenswrapper[4702]: I1125 10:47:51.529646 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:51 crc kubenswrapper[4702]: I1125 10:47:51.574278 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:47:51 crc kubenswrapper[4702]: I1125 10:47:51.713891 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:51 crc kubenswrapper[4702]: I1125 10:47:51.750721 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:47:53 crc kubenswrapper[4702]: I1125 10:47:53.918350 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:47:54 crc kubenswrapper[4702]: I1125 10:47:54.134028 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.010295 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.059751 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.187569 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4"] Nov 25 10:47:56 crc kubenswrapper[4702]: E1125 10:47:56.188415 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerName="util" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.188438 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerName="util" Nov 25 10:47:56 crc kubenswrapper[4702]: E1125 10:47:56.188472 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerName="pull" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.188481 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerName="pull" Nov 25 10:47:56 crc kubenswrapper[4702]: E1125 10:47:56.188513 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerName="extract" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.188522 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerName="extract" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.188848 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a065282b-3c41-4b07-93b3-e29e2502e89a" containerName="extract" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.189636 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.206195 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4"] Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.206875 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.207088 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.207235 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.207375 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.207518 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-glvsn" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.289778 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7dqpx\" (UniqueName: \"kubernetes.io/projected/a6507463-185a-40db-9736-bfcc4f0928e9-kube-api-access-7dqpx\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.290299 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a6507463-185a-40db-9736-bfcc4f0928e9-apiservice-cert\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.290459 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a6507463-185a-40db-9736-bfcc4f0928e9-webhook-cert\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.391418 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a6507463-185a-40db-9736-bfcc4f0928e9-apiservice-cert\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.391463 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a6507463-185a-40db-9736-bfcc4f0928e9-webhook-cert\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.391565 4702 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"kube-api-access-7dqpx\" (UniqueName: \"kubernetes.io/projected/a6507463-185a-40db-9736-bfcc4f0928e9-kube-api-access-7dqpx\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.398025 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a6507463-185a-40db-9736-bfcc4f0928e9-webhook-cert\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.398781 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a6507463-185a-40db-9736-bfcc4f0928e9-apiservice-cert\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.412977 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7dqpx\" (UniqueName: \"kubernetes.io/projected/a6507463-185a-40db-9736-bfcc4f0928e9-kube-api-access-7dqpx\") pod \"metallb-operator-controller-manager-d9d597dc8-q6ll4\" (UID: \"a6507463-185a-40db-9736-bfcc4f0928e9\") " pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.490019 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv"] Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.491283 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.493168 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.493549 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-zsqrf" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.493844 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.503773 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv"] Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.522328 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.594394 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jgmsh\" (UniqueName: \"kubernetes.io/projected/f587e69f-3aaf-403e-a060-bf4542e19ec8-kube-api-access-jgmsh\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.594511 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f587e69f-3aaf-403e-a060-bf4542e19ec8-webhook-cert\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.594655 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f587e69f-3aaf-403e-a060-bf4542e19ec8-apiservice-cert\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.695841 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f587e69f-3aaf-403e-a060-bf4542e19ec8-webhook-cert\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.695923 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f587e69f-3aaf-403e-a060-bf4542e19ec8-apiservice-cert\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.696044 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jgmsh\" (UniqueName: \"kubernetes.io/projected/f587e69f-3aaf-403e-a060-bf4542e19ec8-kube-api-access-jgmsh\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.700000 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f587e69f-3aaf-403e-a060-bf4542e19ec8-apiservice-cert\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.701523 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f587e69f-3aaf-403e-a060-bf4542e19ec8-webhook-cert\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " 
pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.714864 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jgmsh\" (UniqueName: \"kubernetes.io/projected/f587e69f-3aaf-403e-a060-bf4542e19ec8-kube-api-access-jgmsh\") pod \"metallb-operator-webhook-server-5c9db6d78-h6mmv\" (UID: \"f587e69f-3aaf-403e-a060-bf4542e19ec8\") " pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:47:56 crc kubenswrapper[4702]: I1125 10:47:56.826566 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:48:04 crc kubenswrapper[4702]: I1125 10:48:04.934023 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4"] Nov 25 10:48:06 crc kubenswrapper[4702]: W1125 10:48:06.032149 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda6507463_185a_40db_9736_bfcc4f0928e9.slice/crio-9124fd0c19866b5ddf2a655a591f49c005a7b16bf4f760489dc01794d49792f2 WatchSource:0}: Error finding container 9124fd0c19866b5ddf2a655a591f49c005a7b16bf4f760489dc01794d49792f2: Status 404 returned error can't find the container with id 9124fd0c19866b5ddf2a655a591f49c005a7b16bf4f760489dc01794d49792f2 Nov 25 10:48:06 crc kubenswrapper[4702]: I1125 10:48:06.312870 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" event={"ID":"a6507463-185a-40db-9736-bfcc4f0928e9","Type":"ContainerStarted","Data":"9124fd0c19866b5ddf2a655a591f49c005a7b16bf4f760489dc01794d49792f2"} Nov 25 10:48:08 crc kubenswrapper[4702]: I1125 10:48:08.363536 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv"] Nov 25 10:48:09 crc kubenswrapper[4702]: I1125 10:48:09.348227 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" event={"ID":"f587e69f-3aaf-403e-a060-bf4542e19ec8","Type":"ContainerStarted","Data":"4c1ae56390ccd40a168b5e0a05fa8516481163def379953126ecd8cab9e07586"} Nov 25 10:48:10 crc kubenswrapper[4702]: I1125 10:48:10.357551 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w9lw" event={"ID":"d93c64ec-5a95-4cef-a289-9fee39d7466f","Type":"ContainerStarted","Data":"037b3c1c6f2324880250d8e04d17f81303340b48494297365008ae09108c3bc0"} Nov 25 10:48:10 crc kubenswrapper[4702]: I1125 10:48:10.359816 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnqkn" event={"ID":"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b","Type":"ContainerStarted","Data":"428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb"} Nov 25 10:48:10 crc kubenswrapper[4702]: I1125 10:48:10.362222 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc8zv" event={"ID":"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62","Type":"ContainerStarted","Data":"d53a99b17509fb3e48c6519c05d8fe60358a92d7ac6f58643b9ac68c0113b550"} Nov 25 10:48:10 crc kubenswrapper[4702]: I1125 10:48:10.365016 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzp7g" 
event={"ID":"2980b36d-ef86-443d-9c30-b38cdf91e95b","Type":"ContainerStarted","Data":"73d0421047e9319ed5185bfb7113f9a3677c7da744fcf150b2a0f9017f233213"} Nov 25 10:48:10 crc kubenswrapper[4702]: I1125 10:48:10.367016 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zs89l" event={"ID":"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4","Type":"ContainerStarted","Data":"e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e"} Nov 25 10:48:11 crc kubenswrapper[4702]: I1125 10:48:11.379683 4702 generic.go:334] "Generic (PLEG): container finished" podID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerID="037b3c1c6f2324880250d8e04d17f81303340b48494297365008ae09108c3bc0" exitCode=0 Nov 25 10:48:11 crc kubenswrapper[4702]: I1125 10:48:11.379740 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w9lw" event={"ID":"d93c64ec-5a95-4cef-a289-9fee39d7466f","Type":"ContainerDied","Data":"037b3c1c6f2324880250d8e04d17f81303340b48494297365008ae09108c3bc0"} Nov 25 10:48:11 crc kubenswrapper[4702]: I1125 10:48:11.383398 4702 generic.go:334] "Generic (PLEG): container finished" podID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerID="428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb" exitCode=0 Nov 25 10:48:11 crc kubenswrapper[4702]: I1125 10:48:11.384504 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnqkn" event={"ID":"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b","Type":"ContainerDied","Data":"428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb"} Nov 25 10:48:11 crc kubenswrapper[4702]: I1125 10:48:11.427958 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-gzp7g" podStartSLOduration=16.555275897 podStartE2EDuration="44.427936347s" podCreationTimestamp="2025-11-25 10:47:27 +0000 UTC" firstStartedPulling="2025-11-25 10:47:36.857492065 +0000 UTC m=+954.224087754" lastFinishedPulling="2025-11-25 10:48:04.730152505 +0000 UTC m=+982.096748204" observedRunningTime="2025-11-25 10:48:11.426330105 +0000 UTC m=+988.792925814" watchObservedRunningTime="2025-11-25 10:48:11.427936347 +0000 UTC m=+988.794532036" Nov 25 10:48:11 crc kubenswrapper[4702]: I1125 10:48:11.444307 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-zs89l" podStartSLOduration=16.800703965 podStartE2EDuration="38.444290679s" podCreationTimestamp="2025-11-25 10:47:33 +0000 UTC" firstStartedPulling="2025-11-25 10:47:36.873262475 +0000 UTC m=+954.239858164" lastFinishedPulling="2025-11-25 10:47:58.516849189 +0000 UTC m=+975.883444878" observedRunningTime="2025-11-25 10:48:11.442735358 +0000 UTC m=+988.809331047" watchObservedRunningTime="2025-11-25 10:48:11.444290679 +0000 UTC m=+988.810886368" Nov 25 10:48:11 crc kubenswrapper[4702]: I1125 10:48:11.462662 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-gc8zv" podStartSLOduration=8.341510316 podStartE2EDuration="39.462646644s" podCreationTimestamp="2025-11-25 10:47:32 +0000 UTC" firstStartedPulling="2025-11-25 10:47:36.899276094 +0000 UTC m=+954.265871793" lastFinishedPulling="2025-11-25 10:48:08.020412442 +0000 UTC m=+985.387008121" observedRunningTime="2025-11-25 10:48:11.456701937 +0000 UTC m=+988.823297626" watchObservedRunningTime="2025-11-25 10:48:11.462646644 +0000 UTC m=+988.829242333" Nov 25 10:48:12 crc 
kubenswrapper[4702]: I1125 10:48:12.865996 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:48:12 crc kubenswrapper[4702]: I1125 10:48:12.866854 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:48:12 crc kubenswrapper[4702]: I1125 10:48:12.917819 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:48:13 crc kubenswrapper[4702]: I1125 10:48:13.517999 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:48:13 crc kubenswrapper[4702]: I1125 10:48:13.518292 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:48:13 crc kubenswrapper[4702]: I1125 10:48:13.559737 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:48:13 crc kubenswrapper[4702]: I1125 10:48:13.590616 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:48:13 crc kubenswrapper[4702]: I1125 10:48:13.590676 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:48:14 crc kubenswrapper[4702]: I1125 10:48:14.457785 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:48:14 crc kubenswrapper[4702]: I1125 10:48:14.467350 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.441685 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w9lw" event={"ID":"d93c64ec-5a95-4cef-a289-9fee39d7466f","Type":"ContainerStarted","Data":"bc01dd268d4cfd6f44de94fa63a3fac8de6a9743f855f1ae210d05d66f2841f2"} Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.444493 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnqkn" event={"ID":"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b","Type":"ContainerStarted","Data":"bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343"} Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.708836 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h8j5p"] Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.709092 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-h8j5p" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerName="registry-server" containerID="cri-o://fd8f6e4ff16f28dbaeeb5cfe7a68bf8676872b8db7a5e443ec70d338b7298c8c" gracePeriod=2 Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.879768 4702 kubelet.go:2542] "SyncLoop (probe)" 
probe="readiness" status="" pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.880137 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.924089 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gc8zv"] Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.924297 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-gc8zv" podUID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerName="registry-server" containerID="cri-o://d53a99b17509fb3e48c6519c05d8fe60358a92d7ac6f58643b9ac68c0113b550" gracePeriod=2 Nov 25 10:48:17 crc kubenswrapper[4702]: I1125 10:48:17.953699 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.325310 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mm67q"] Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.325581 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mm67q" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerName="registry-server" containerID="cri-o://7af4d97cc51f1fa783f8f8f93574e232b6a510d873e307c657b4efc7abc51c53" gracePeriod=2 Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.486621 4702 generic.go:334] "Generic (PLEG): container finished" podID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerID="d53a99b17509fb3e48c6519c05d8fe60358a92d7ac6f58643b9ac68c0113b550" exitCode=0 Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.486781 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc8zv" event={"ID":"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62","Type":"ContainerDied","Data":"d53a99b17509fb3e48c6519c05d8fe60358a92d7ac6f58643b9ac68c0113b550"} Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.511185 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-46fx5"] Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.512041 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-46fx5" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="registry-server" containerID="cri-o://28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b" gracePeriod=2 Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.523615 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7w9lw" podStartSLOduration=7.002351361 podStartE2EDuration="40.523596071s" podCreationTimestamp="2025-11-25 10:47:38 +0000 UTC" firstStartedPulling="2025-11-25 10:47:43.082457894 +0000 UTC m=+960.449053583" lastFinishedPulling="2025-11-25 10:48:16.603702604 +0000 UTC m=+993.970298293" observedRunningTime="2025-11-25 10:48:18.517438389 +0000 UTC m=+995.884034078" watchObservedRunningTime="2025-11-25 10:48:18.523596071 +0000 UTC m=+995.890191760" Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.545987 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.563740 4702 
Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.928529 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wg9lr"]
Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.929089 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-wg9lr" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="registry-server" containerID="cri-o://b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089" gracePeriod=2
Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.971023 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7w9lw"
Nov 25 10:48:18 crc kubenswrapper[4702]: I1125 10:48:18.971072 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7w9lw"
Nov 25 10:48:19 crc kubenswrapper[4702]: I1125 10:48:19.108809 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bc4kb"]
Nov 25 10:48:19 crc kubenswrapper[4702]: I1125 10:48:19.109073 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-bc4kb" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerName="registry-server" containerID="cri-o://16722b785a2bd0614ab287bc2171c23bfd494dcb8c89cdb4f1035e29d3f9e099" gracePeriod=2
Nov 25 10:48:19 crc kubenswrapper[4702]: I1125 10:48:19.507743 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bm7kf"]
Nov 25 10:48:19 crc kubenswrapper[4702]: I1125 10:48:19.508081 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bm7kf" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="registry-server" containerID="cri-o://236243b8156d04bd328f673aca75baf2c2740114f60068f5746ed9b74f85ad15" gracePeriod=2
Nov 25 10:48:20 crc kubenswrapper[4702]: I1125 10:48:20.020584 4702 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-marketplace-7w9lw" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerName="registry-server" probeResult="failure" output=<
Nov 25 10:48:20 crc kubenswrapper[4702]: timeout: failed to connect service ":50051" within 1s
Nov 25 10:48:20 crc kubenswrapper[4702]: >
Nov 25 10:48:20 crc kubenswrapper[4702]: I1125 10:48:20.104882 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2x6vv"]
Nov 25 10:48:20 crc kubenswrapper[4702]: I1125 10:48:20.105160 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2x6vv" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="registry-server" containerID="cri-o://bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06" gracePeriod=2
Nov 25 10:48:20 crc kubenswrapper[4702]: E1125 10:48:20.274712 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089 is running failed: container process not found" containerID="b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:20 crc kubenswrapper[4702]: E1125 10:48:20.275296 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089 is running failed: container process not found" containerID="b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:20 crc kubenswrapper[4702]: E1125 10:48:20.275809 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089 is running failed: container process not found" containerID="b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:20 crc kubenswrapper[4702]: E1125 10:48:20.275854 4702 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-wg9lr" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="registry-server"
Nov 25 10:48:20 crc kubenswrapper[4702]: E1125 10:48:20.462657 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b is running failed: container process not found" containerID="28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:20 crc kubenswrapper[4702]: E1125 10:48:20.462968 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b is running failed: container process not found" containerID="28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:20 crc kubenswrapper[4702]: E1125 10:48:20.463405 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b is running failed: container process not found" containerID="28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:20 crc kubenswrapper[4702]: E1125 10:48:20.463445 4702 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/community-operators-46fx5" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="registry-server"
podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="registry-server" Nov 25 10:48:20 crc kubenswrapper[4702]: I1125 10:48:20.704944 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zcs6g"] Nov 25 10:48:20 crc kubenswrapper[4702]: I1125 10:48:20.705665 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zcs6g" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerName="registry-server" containerID="cri-o://03247ff6e8c0d7878d3dd0d3e07c678dbcf64411ff7e81b07330c010096be7a7" gracePeriod=2 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.306814 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bhnmp"] Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.307114 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-bhnmp" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerName="registry-server" containerID="cri-o://573c653b15aecabbbd2f7a3eabf10af2afdee556722d39c25b635acb2f0df0f1" gracePeriod=2 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.521601 4702 generic.go:334] "Generic (PLEG): container finished" podID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerID="573c653b15aecabbbd2f7a3eabf10af2afdee556722d39c25b635acb2f0df0f1" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.521672 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhnmp" event={"ID":"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9","Type":"ContainerDied","Data":"573c653b15aecabbbd2f7a3eabf10af2afdee556722d39c25b635acb2f0df0f1"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.527563 4702 generic.go:334] "Generic (PLEG): container finished" podID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerID="fd8f6e4ff16f28dbaeeb5cfe7a68bf8676872b8db7a5e443ec70d338b7298c8c" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.527684 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h8j5p" event={"ID":"69f8b3df-cceb-485b-b985-7bdad0788aef","Type":"ContainerDied","Data":"fd8f6e4ff16f28dbaeeb5cfe7a68bf8676872b8db7a5e443ec70d338b7298c8c"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.534790 4702 generic.go:334] "Generic (PLEG): container finished" podID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerID="7af4d97cc51f1fa783f8f8f93574e232b6a510d873e307c657b4efc7abc51c53" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.534847 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm67q" event={"ID":"86eb5301-b8dd-4784-81c9-56375cbe983d","Type":"ContainerDied","Data":"7af4d97cc51f1fa783f8f8f93574e232b6a510d873e307c657b4efc7abc51c53"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.534872 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mm67q" event={"ID":"86eb5301-b8dd-4784-81c9-56375cbe983d","Type":"ContainerDied","Data":"147484e03d90c3228a9cddb87fc206875291fbe378e894b9ed1f5352390fc498"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.534882 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="147484e03d90c3228a9cddb87fc206875291fbe378e894b9ed1f5352390fc498" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.536824 4702 generic.go:334] "Generic (PLEG): container 
finished" podID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerID="28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.536871 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46fx5" event={"ID":"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af","Type":"ContainerDied","Data":"28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.540181 4702 generic.go:334] "Generic (PLEG): container finished" podID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerID="b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.540413 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg9lr" event={"ID":"c0738bd9-a74b-4aaa-a885-eae81ea2dc35","Type":"ContainerDied","Data":"b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.544590 4702 generic.go:334] "Generic (PLEG): container finished" podID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerID="236243b8156d04bd328f673aca75baf2c2740114f60068f5746ed9b74f85ad15" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.544649 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bm7kf" event={"ID":"a7287d27-2a96-42f3-b8c5-1ca79d5c422a","Type":"ContainerDied","Data":"236243b8156d04bd328f673aca75baf2c2740114f60068f5746ed9b74f85ad15"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.547460 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-gc8zv" event={"ID":"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62","Type":"ContainerDied","Data":"eb8c1b37a991db0e80e8ee835c374dc4242d62182087166b699b9f3ff573e530"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.547497 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="eb8c1b37a991db0e80e8ee835c374dc4242d62182087166b699b9f3ff573e530" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.564045 4702 generic.go:334] "Generic (PLEG): container finished" podID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerID="03247ff6e8c0d7878d3dd0d3e07c678dbcf64411ff7e81b07330c010096be7a7" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.564102 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zcs6g" event={"ID":"de760b35-119f-4975-8eeb-76e8f9adb9f1","Type":"ContainerDied","Data":"03247ff6e8c0d7878d3dd0d3e07c678dbcf64411ff7e81b07330c010096be7a7"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.571140 4702 generic.go:334] "Generic (PLEG): container finished" podID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerID="bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.571211 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2x6vv" event={"ID":"e57438f5-de09-4857-b5fc-e67b4c8c443d","Type":"ContainerDied","Data":"bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.574184 4702 generic.go:334] "Generic (PLEG): container finished" podID="9144c34a-7330-4d8b-aaa7-34747a3f4773" 
containerID="16722b785a2bd0614ab287bc2171c23bfd494dcb8c89cdb4f1035e29d3f9e099" exitCode=0 Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.574218 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bc4kb" event={"ID":"9144c34a-7330-4d8b-aaa7-34747a3f4773","Type":"ContainerDied","Data":"16722b785a2bd0614ab287bc2171c23bfd494dcb8c89cdb4f1035e29d3f9e099"} Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.652006 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.675253 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.804041 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-catalog-content\") pod \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.804121 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-catalog-content\") pod \"86eb5301-b8dd-4784-81c9-56375cbe983d\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.804168 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-utilities\") pod \"86eb5301-b8dd-4784-81c9-56375cbe983d\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.804258 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xr6cx\" (UniqueName: \"kubernetes.io/projected/86eb5301-b8dd-4784-81c9-56375cbe983d-kube-api-access-xr6cx\") pod \"86eb5301-b8dd-4784-81c9-56375cbe983d\" (UID: \"86eb5301-b8dd-4784-81c9-56375cbe983d\") " Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.804321 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-utilities\") pod \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.804424 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5vgc7\" (UniqueName: \"kubernetes.io/projected/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-kube-api-access-5vgc7\") pod \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\" (UID: \"0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62\") " Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.805508 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-utilities" (OuterVolumeSpecName: "utilities") pod "86eb5301-b8dd-4784-81c9-56375cbe983d" (UID: "86eb5301-b8dd-4784-81c9-56375cbe983d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.815231 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-utilities" (OuterVolumeSpecName: "utilities") pod "0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" (UID: "0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.818658 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86eb5301-b8dd-4784-81c9-56375cbe983d-kube-api-access-xr6cx" (OuterVolumeSpecName: "kube-api-access-xr6cx") pod "86eb5301-b8dd-4784-81c9-56375cbe983d" (UID: "86eb5301-b8dd-4784-81c9-56375cbe983d"). InnerVolumeSpecName "kube-api-access-xr6cx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.823627 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bm7kf" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.826868 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-kube-api-access-5vgc7" (OuterVolumeSpecName: "kube-api-access-5vgc7") pod "0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" (UID: "0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62"). InnerVolumeSpecName "kube-api-access-5vgc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.879224 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" (UID: "0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.905664 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xr6cx\" (UniqueName: \"kubernetes.io/projected/86eb5301-b8dd-4784-81c9-56375cbe983d-kube-api-access-xr6cx\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.905708 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.905728 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5vgc7\" (UniqueName: \"kubernetes.io/projected/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-kube-api-access-5vgc7\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.905737 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.905745 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.922796 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zs89l"] Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.923129 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "86eb5301-b8dd-4784-81c9-56375cbe983d" (UID: "86eb5301-b8dd-4784-81c9-56375cbe983d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:21 crc kubenswrapper[4702]: I1125 10:48:21.923163 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-zs89l" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="registry-server" containerID="cri-o://e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e" gracePeriod=2 Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.007260 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-utilities\") pod \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.007342 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-catalog-content\") pod \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.007461 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d46tg\" (UniqueName: \"kubernetes.io/projected/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-kube-api-access-d46tg\") pod \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\" (UID: \"a7287d27-2a96-42f3-b8c5-1ca79d5c422a\") " Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.007808 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/86eb5301-b8dd-4784-81c9-56375cbe983d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.008423 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-utilities" (OuterVolumeSpecName: "utilities") pod "a7287d27-2a96-42f3-b8c5-1ca79d5c422a" (UID: "a7287d27-2a96-42f3-b8c5-1ca79d5c422a"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.012472 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-kube-api-access-d46tg" (OuterVolumeSpecName: "kube-api-access-d46tg") pod "a7287d27-2a96-42f3-b8c5-1ca79d5c422a" (UID: "a7287d27-2a96-42f3-b8c5-1ca79d5c422a"). InnerVolumeSpecName "kube-api-access-d46tg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.082482 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a7287d27-2a96-42f3-b8c5-1ca79d5c422a" (UID: "a7287d27-2a96-42f3-b8c5-1ca79d5c422a"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.110738 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.110775 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.110790 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d46tg\" (UniqueName: \"kubernetes.io/projected/a7287d27-2a96-42f3-b8c5-1ca79d5c422a-kube-api-access-d46tg\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.508296 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2dxxh"] Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.508531 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-2dxxh" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerName="registry-server" containerID="cri-o://7ffa06af9e254343a7e2b52dc964d06ce8cf159a6678e9676dcf87583f439450" gracePeriod=2 Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.592580 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bm7kf" event={"ID":"a7287d27-2a96-42f3-b8c5-1ca79d5c422a","Type":"ContainerDied","Data":"8cecf1eb557baae88dd7b97618660ea11ff85eea5bf4b8f293380f1f4a6828c2"} Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.592643 4702 scope.go:117] "RemoveContainer" containerID="236243b8156d04bd328f673aca75baf2c2740114f60068f5746ed9b74f85ad15" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.593120 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bm7kf" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.606513 4702 generic.go:334] "Generic (PLEG): container finished" podID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerID="e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e" exitCode=0 Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.606641 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mm67q" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.608184 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zs89l" event={"ID":"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4","Type":"ContainerDied","Data":"e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e"} Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.608518 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-gc8zv" Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.646536 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bm7kf"] Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.655112 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bm7kf"] Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.670694 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mm67q"] Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.680982 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mm67q"] Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.688291 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-gc8zv"] Nov 25 10:48:22 crc kubenswrapper[4702]: I1125 10:48:22.696673 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-gc8zv"] Nov 25 10:48:22 crc kubenswrapper[4702]: E1125 10:48:22.862551 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06 is running failed: container process not found" containerID="bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 10:48:22 crc kubenswrapper[4702]: E1125 10:48:22.863286 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06 is running failed: container process not found" containerID="bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 10:48:22 crc kubenswrapper[4702]: E1125 10:48:22.863832 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06 is running failed: container process not found" containerID="bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06" cmd=["grpc_health_probe","-addr=:50051"] Nov 25 10:48:22 crc kubenswrapper[4702]: E1125 10:48:22.863866 4702 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-2x6vv" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="registry-server" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.109553 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6blk"] Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.109778 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-x6blk" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerName="registry-server" containerID="cri-o://b5e5f66efdaedeb410a56f03c27c69f594fc16c7b4ab2ece72166007fea6b432" gracePeriod=2 Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.415328 4702 
Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.416083 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" path="/var/lib/kubelet/pods/86eb5301-b8dd-4784-81c9-56375cbe983d/volumes"
Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.417149 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" path="/var/lib/kubelet/pods/a7287d27-2a96-42f3-b8c5-1ca79d5c422a/volumes"
Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.490845 4702 scope.go:117] "RemoveContainer" containerID="0f09617b9671c1b0fa766ff2b41932af62567280081cfce2590adf120e7a71d8"
Nov 25 10:48:23 crc kubenswrapper[4702]: E1125 10:48:23.519156 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e is running failed: container process not found" containerID="e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:23 crc kubenswrapper[4702]: E1125 10:48:23.520152 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e is running failed: container process not found" containerID="e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:23 crc kubenswrapper[4702]: E1125 10:48:23.520561 4702 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e is running failed: container process not found" containerID="e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e" cmd=["grpc_health_probe","-addr=:50051"]
Nov 25 10:48:23 crc kubenswrapper[4702]: E1125 10:48:23.520595 4702 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/certified-operators-zs89l" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="registry-server"
Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.545213 4702 scope.go:117] "RemoveContainer" containerID="60ab00391af19ab4af6406c3e0a1d237f8e87dbeb6fb707b0bbbe519675c259b"
Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.556459 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bc4kb"
Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.567689 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2x6vv"
Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.582707 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bhnmp"
Need to start a new one" pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.597769 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.599462 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.613072 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h8j5p" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.613706 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.618972 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-bhnmp" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.618992 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-bhnmp" event={"ID":"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9","Type":"ContainerDied","Data":"af4a81384ce5f6a82966d479328a1989f4787122f158e5c562c8130ad3f9216e"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.619056 4702 scope.go:117] "RemoveContainer" containerID="573c653b15aecabbbd2f7a3eabf10af2afdee556722d39c25b635acb2f0df0f1" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.622310 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-bc4kb" event={"ID":"9144c34a-7330-4d8b-aaa7-34747a3f4773","Type":"ContainerDied","Data":"c518dd51e4a58f27c5d11c86840b89a9e76d38dc1b0335f40747493cb9486a46"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.622368 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-bc4kb" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.632703 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-h8j5p" event={"ID":"69f8b3df-cceb-485b-b985-7bdad0788aef","Type":"ContainerDied","Data":"8f89130a0f49d3d5aae94b95273388eb46ddd4537c19cc7d3a043d807853d302"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.632807 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-h8j5p" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.648003 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-wg9lr" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.647993 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-wg9lr" event={"ID":"c0738bd9-a74b-4aaa-a885-eae81ea2dc35","Type":"ContainerDied","Data":"c94948f12b2d345b12227c4cb12d8bb51dbb5496a0dfb9f1811db1a9deb87700"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.651240 4702 scope.go:117] "RemoveContainer" containerID="8d45ea01eb10d4ca40510cd12c7cd02e0492c1c790bbf2485ccc41373e03f7cc" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.657016 4702 generic.go:334] "Generic (PLEG): container finished" podID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerID="b5e5f66efdaedeb410a56f03c27c69f594fc16c7b4ab2ece72166007fea6b432" exitCode=0 Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.657144 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6blk" event={"ID":"c78fcba2-ffee-4ad3-bccd-085a90c81236","Type":"ContainerDied","Data":"b5e5f66efdaedeb410a56f03c27c69f594fc16c7b4ab2ece72166007fea6b432"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.670301 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2x6vv" event={"ID":"e57438f5-de09-4857-b5fc-e67b4c8c443d","Type":"ContainerDied","Data":"e8f8439a54e2ebc88a9d8372bfcaa4739c039f4b98a62b9fee947407a1fd2142"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.670419 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2x6vv" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.683397 4702 generic.go:334] "Generic (PLEG): container finished" podID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerID="7ffa06af9e254343a7e2b52dc964d06ce8cf159a6678e9676dcf87583f439450" exitCode=0 Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.683529 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dxxh" event={"ID":"615b8e09-5a50-4af4-89dd-31fb6282baea","Type":"ContainerDied","Data":"7ffa06af9e254343a7e2b52dc964d06ce8cf159a6678e9676dcf87583f439450"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.687237 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zcs6g" event={"ID":"de760b35-119f-4975-8eeb-76e8f9adb9f1","Type":"ContainerDied","Data":"03843430be3faae62305e427e7079166c795eff6ba79717eecb695066dddeb9c"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.689109 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zcs6g" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.700719 4702 scope.go:117] "RemoveContainer" containerID="312d37902ebc0707e1ceb80bce86f9b69a3bfb63169ac3a8bd79ae9b8002920f" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.703736 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-46fx5" event={"ID":"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af","Type":"ContainerDied","Data":"5dc9039e236ca878f259055b1698fa5677703bc1911b8e3cf83fbb670912d99a"} Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.703877 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-46fx5" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736430 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-utilities\") pod \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736480 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zfdjw\" (UniqueName: \"kubernetes.io/projected/69f8b3df-cceb-485b-b985-7bdad0788aef-kube-api-access-zfdjw\") pod \"69f8b3df-cceb-485b-b985-7bdad0788aef\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736543 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mrpbx\" (UniqueName: \"kubernetes.io/projected/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-kube-api-access-mrpbx\") pod \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736568 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8n59m\" (UniqueName: \"kubernetes.io/projected/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-kube-api-access-8n59m\") pod \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736602 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-catalog-content\") pod \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736622 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-catalog-content\") pod \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\" (UID: \"37a724f4-8b2c-4e9d-9502-85ac6fd9b9af\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736660 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-utilities\") pod \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736688 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-utilities\") pod \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736712 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-utilities\") pod \"de760b35-119f-4975-8eeb-76e8f9adb9f1\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736753 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-l7d9j\" (UniqueName: \"kubernetes.io/projected/9144c34a-7330-4d8b-aaa7-34747a3f4773-kube-api-access-l7d9j\") pod 
\"9144c34a-7330-4d8b-aaa7-34747a3f4773\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736788 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-catalog-content\") pod \"e57438f5-de09-4857-b5fc-e67b4c8c443d\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736832 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-catalog-content\") pod \"69f8b3df-cceb-485b-b985-7bdad0788aef\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736863 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-catalog-content\") pod \"de760b35-119f-4975-8eeb-76e8f9adb9f1\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736921 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-utilities\") pod \"9144c34a-7330-4d8b-aaa7-34747a3f4773\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736956 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-catalog-content\") pod \"9144c34a-7330-4d8b-aaa7-34747a3f4773\" (UID: \"9144c34a-7330-4d8b-aaa7-34747a3f4773\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.736986 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4r52s\" (UniqueName: \"kubernetes.io/projected/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-kube-api-access-4r52s\") pod \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\" (UID: \"05a3f3b1-e2d1-41b4-88c8-4b024440d5e9\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.737017 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-utilities\") pod \"69f8b3df-cceb-485b-b985-7bdad0788aef\" (UID: \"69f8b3df-cceb-485b-b985-7bdad0788aef\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.737048 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jf69c\" (UniqueName: \"kubernetes.io/projected/de760b35-119f-4975-8eeb-76e8f9adb9f1-kube-api-access-jf69c\") pod \"de760b35-119f-4975-8eeb-76e8f9adb9f1\" (UID: \"de760b35-119f-4975-8eeb-76e8f9adb9f1\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.737087 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-catalog-content\") pod \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\" (UID: \"c0738bd9-a74b-4aaa-a885-eae81ea2dc35\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.737114 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pd5vv\" (UniqueName: 
\"kubernetes.io/projected/e57438f5-de09-4857-b5fc-e67b4c8c443d-kube-api-access-pd5vv\") pod \"e57438f5-de09-4857-b5fc-e67b4c8c443d\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.737140 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-utilities\") pod \"e57438f5-de09-4857-b5fc-e67b4c8c443d\" (UID: \"e57438f5-de09-4857-b5fc-e67b4c8c443d\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.740283 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-utilities" (OuterVolumeSpecName: "utilities") pod "05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" (UID: "05a3f3b1-e2d1-41b4-88c8-4b024440d5e9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.740869 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-utilities" (OuterVolumeSpecName: "utilities") pod "37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" (UID: "37a724f4-8b2c-4e9d-9502-85ac6fd9b9af"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.742601 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-utilities" (OuterVolumeSpecName: "utilities") pod "c0738bd9-a74b-4aaa-a885-eae81ea2dc35" (UID: "c0738bd9-a74b-4aaa-a885-eae81ea2dc35"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.749272 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e57438f5-de09-4857-b5fc-e67b4c8c443d-kube-api-access-pd5vv" (OuterVolumeSpecName: "kube-api-access-pd5vv") pod "e57438f5-de09-4857-b5fc-e67b4c8c443d" (UID: "e57438f5-de09-4857-b5fc-e67b4c8c443d"). InnerVolumeSpecName "kube-api-access-pd5vv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.749649 4702 scope.go:117] "RemoveContainer" containerID="16722b785a2bd0614ab287bc2171c23bfd494dcb8c89cdb4f1035e29d3f9e099" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.757018 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-utilities" (OuterVolumeSpecName: "utilities") pod "de760b35-119f-4975-8eeb-76e8f9adb9f1" (UID: "de760b35-119f-4975-8eeb-76e8f9adb9f1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.759110 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-utilities" (OuterVolumeSpecName: "utilities") pod "69f8b3df-cceb-485b-b985-7bdad0788aef" (UID: "69f8b3df-cceb-485b-b985-7bdad0788aef"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.765469 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.766064 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.778522 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-utilities" (OuterVolumeSpecName: "utilities") pod "e57438f5-de09-4857-b5fc-e67b4c8c443d" (UID: "e57438f5-de09-4857-b5fc-e67b4c8c443d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.780940 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-utilities" (OuterVolumeSpecName: "utilities") pod "9144c34a-7330-4d8b-aaa7-34747a3f4773" (UID: "9144c34a-7330-4d8b-aaa7-34747a3f4773"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.787317 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/de760b35-119f-4975-8eeb-76e8f9adb9f1-kube-api-access-jf69c" (OuterVolumeSpecName: "kube-api-access-jf69c") pod "de760b35-119f-4975-8eeb-76e8f9adb9f1" (UID: "de760b35-119f-4975-8eeb-76e8f9adb9f1"). InnerVolumeSpecName "kube-api-access-jf69c". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.787368 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-kube-api-access-4r52s" (OuterVolumeSpecName: "kube-api-access-4r52s") pod "05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" (UID: "05a3f3b1-e2d1-41b4-88c8-4b024440d5e9"). InnerVolumeSpecName "kube-api-access-4r52s". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.787386 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-kube-api-access-8n59m" (OuterVolumeSpecName: "kube-api-access-8n59m") pod "37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" (UID: "37a724f4-8b2c-4e9d-9502-85ac6fd9b9af"). InnerVolumeSpecName "kube-api-access-8n59m". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.786394 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-kube-api-access-mrpbx" (OuterVolumeSpecName: "kube-api-access-mrpbx") pod "c0738bd9-a74b-4aaa-a885-eae81ea2dc35" (UID: "c0738bd9-a74b-4aaa-a885-eae81ea2dc35"). InnerVolumeSpecName "kube-api-access-mrpbx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.795729 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/69f8b3df-cceb-485b-b985-7bdad0788aef-kube-api-access-zfdjw" (OuterVolumeSpecName: "kube-api-access-zfdjw") pod "69f8b3df-cceb-485b-b985-7bdad0788aef" (UID: "69f8b3df-cceb-485b-b985-7bdad0788aef"). InnerVolumeSpecName "kube-api-access-zfdjw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.795862 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9144c34a-7330-4d8b-aaa7-34747a3f4773-kube-api-access-l7d9j" (OuterVolumeSpecName: "kube-api-access-l7d9j") pod "9144c34a-7330-4d8b-aaa7-34747a3f4773" (UID: "9144c34a-7330-4d8b-aaa7-34747a3f4773"). InnerVolumeSpecName "kube-api-access-l7d9j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.804557 4702 scope.go:117] "RemoveContainer" containerID="e4f00f0f0edd4db95dbac20fdf4690b470be79299126a544101195f3edc2ca94" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.839819 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pd5vv\" (UniqueName: \"kubernetes.io/projected/e57438f5-de09-4857-b5fc-e67b4c8c443d-kube-api-access-pd5vv\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.839886 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840208 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840226 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zfdjw\" (UniqueName: \"kubernetes.io/projected/69f8b3df-cceb-485b-b985-7bdad0788aef-kube-api-access-zfdjw\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840311 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mrpbx\" (UniqueName: \"kubernetes.io/projected/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-kube-api-access-mrpbx\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840366 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8n59m\" (UniqueName: \"kubernetes.io/projected/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-kube-api-access-8n59m\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840384 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840399 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840412 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840425 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-l7d9j\" (UniqueName: \"kubernetes.io/projected/9144c34a-7330-4d8b-aaa7-34747a3f4773-kube-api-access-l7d9j\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840440 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840451 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4r52s\" (UniqueName: \"kubernetes.io/projected/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-kube-api-access-4r52s\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840462 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.840475 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jf69c\" (UniqueName: \"kubernetes.io/projected/de760b35-119f-4975-8eeb-76e8f9adb9f1-kube-api-access-jf69c\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.841001 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.847511 4702 scope.go:117] "RemoveContainer" containerID="6b80647459cfe754791ad602042a1fc5b6c2cf24f96ef0b16403cc9d94135398" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.874323 4702 scope.go:117] "RemoveContainer" containerID="fd8f6e4ff16f28dbaeeb5cfe7a68bf8676872b8db7a5e443ec70d338b7298c8c" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.880521 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "69f8b3df-cceb-485b-b985-7bdad0788aef" (UID: "69f8b3df-cceb-485b-b985-7bdad0788aef"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.891839 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0738bd9-a74b-4aaa-a885-eae81ea2dc35" (UID: "c0738bd9-a74b-4aaa-a885-eae81ea2dc35"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.901624 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "de760b35-119f-4975-8eeb-76e8f9adb9f1" (UID: "de760b35-119f-4975-8eeb-76e8f9adb9f1"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.902951 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" (UID: "05a3f3b1-e2d1-41b4-88c8-4b024440d5e9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.903244 4702 scope.go:117] "RemoveContainer" containerID="562e354a5586b396dfa7214776c25fc833960ae46681ea259d0fb8b8ce575fd0" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.913644 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e57438f5-de09-4857-b5fc-e67b4c8c443d" (UID: "e57438f5-de09-4857-b5fc-e67b4c8c443d"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.913893 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" (UID: "37a724f4-8b2c-4e9d-9502-85ac6fd9b9af"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.924673 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "9144c34a-7330-4d8b-aaa7-34747a3f4773" (UID: "9144c34a-7330-4d8b-aaa7-34747a3f4773"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.928197 4702 scope.go:117] "RemoveContainer" containerID="1f53ec9b9317ce69c14109e5831e171a7be12e554ea72f44a87ae7f793a0b33d" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.940870 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-utilities\") pod \"c78fcba2-ffee-4ad3-bccd-085a90c81236\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.940941 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-catalog-content\") pod \"615b8e09-5a50-4af4-89dd-31fb6282baea\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.940964 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-catalog-content\") pod \"c78fcba2-ffee-4ad3-bccd-085a90c81236\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941034 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-utilities\") pod \"615b8e09-5a50-4af4-89dd-31fb6282baea\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941092 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6lpk2\" (UniqueName: \"kubernetes.io/projected/615b8e09-5a50-4af4-89dd-31fb6282baea-kube-api-access-6lpk2\") pod \"615b8e09-5a50-4af4-89dd-31fb6282baea\" (UID: \"615b8e09-5a50-4af4-89dd-31fb6282baea\") " Nov 25 10:48:23 crc 
kubenswrapper[4702]: I1125 10:48:23.941154 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9tzdv\" (UniqueName: \"kubernetes.io/projected/c78fcba2-ffee-4ad3-bccd-085a90c81236-kube-api-access-9tzdv\") pod \"c78fcba2-ffee-4ad3-bccd-085a90c81236\" (UID: \"c78fcba2-ffee-4ad3-bccd-085a90c81236\") " Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941444 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941460 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941471 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e57438f5-de09-4857-b5fc-e67b4c8c443d-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941480 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/69f8b3df-cceb-485b-b985-7bdad0788aef-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941489 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/de760b35-119f-4975-8eeb-76e8f9adb9f1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941497 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/9144c34a-7330-4d8b-aaa7-34747a3f4773-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.941505 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0738bd9-a74b-4aaa-a885-eae81ea2dc35-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.942669 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-utilities" (OuterVolumeSpecName: "utilities") pod "c78fcba2-ffee-4ad3-bccd-085a90c81236" (UID: "c78fcba2-ffee-4ad3-bccd-085a90c81236"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.950319 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c78fcba2-ffee-4ad3-bccd-085a90c81236-kube-api-access-9tzdv" (OuterVolumeSpecName: "kube-api-access-9tzdv") pod "c78fcba2-ffee-4ad3-bccd-085a90c81236" (UID: "c78fcba2-ffee-4ad3-bccd-085a90c81236"). InnerVolumeSpecName "kube-api-access-9tzdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.950396 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/615b8e09-5a50-4af4-89dd-31fb6282baea-kube-api-access-6lpk2" (OuterVolumeSpecName: "kube-api-access-6lpk2") pod "615b8e09-5a50-4af4-89dd-31fb6282baea" (UID: "615b8e09-5a50-4af4-89dd-31fb6282baea"). InnerVolumeSpecName "kube-api-access-6lpk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.958293 4702 scope.go:117] "RemoveContainer" containerID="b4d4129c85b186b09db146d011476dd14065cbc88bfcd8343643f892675fa089" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.960332 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-bhnmp"] Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.966600 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-utilities" (OuterVolumeSpecName: "utilities") pod "615b8e09-5a50-4af4-89dd-31fb6282baea" (UID: "615b8e09-5a50-4af4-89dd-31fb6282baea"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.968463 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-bhnmp"] Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.986369 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-bc4kb"] Nov 25 10:48:23 crc kubenswrapper[4702]: I1125 10:48:23.991597 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-bc4kb"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.005307 4702 scope.go:117] "RemoveContainer" containerID="53b110d0951b8e4d4e6d96c47f37d083ecfbb4be7c20de9a23d143d5aaada4f1" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.018812 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-h8j5p"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.043263 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-utilities\") pod \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.043417 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-catalog-content\") pod \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.043513 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xdg4f\" (UniqueName: \"kubernetes.io/projected/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-kube-api-access-xdg4f\") pod \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\" (UID: \"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4\") " Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.043816 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6lpk2\" (UniqueName: \"kubernetes.io/projected/615b8e09-5a50-4af4-89dd-31fb6282baea-kube-api-access-6lpk2\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.043954 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9tzdv\" (UniqueName: \"kubernetes.io/projected/c78fcba2-ffee-4ad3-bccd-085a90c81236-kube-api-access-9tzdv\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.044045 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.044106 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.045966 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-h8j5p"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.046586 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-utilities" (OuterVolumeSpecName: "utilities") pod "d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" (UID: "d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.047192 4702 scope.go:117] "RemoveContainer" containerID="c95cf99c9859a0a90ad2d51c33e30fa78b188a36585ca8ee27e23372a7563d1b" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.056995 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-wg9lr"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.066150 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-kube-api-access-xdg4f" (OuterVolumeSpecName: "kube-api-access-xdg4f") pod "d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" (UID: "d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4"). InnerVolumeSpecName "kube-api-access-xdg4f". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.067182 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-wg9lr"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.075713 4702 scope.go:117] "RemoveContainer" containerID="bc21978bf89bc93296b40463109d973c47a4e9dfdb0027923fa6d423e2cedb06" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.078398 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "615b8e09-5a50-4af4-89dd-31fb6282baea" (UID: "615b8e09-5a50-4af4-89dd-31fb6282baea"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.082750 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c78fcba2-ffee-4ad3-bccd-085a90c81236" (UID: "c78fcba2-ffee-4ad3-bccd-085a90c81236"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.082866 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zcs6g"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.095964 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zcs6g"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.114386 4702 scope.go:117] "RemoveContainer" containerID="a08d5d8d8fab0ae910786624af8f46cfbdfe6c48ca29a3102c0395ca243f275f" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.114512 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-46fx5"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.125157 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-46fx5"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.133070 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2x6vv"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.142070 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2x6vv"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.144518 4702 scope.go:117] "RemoveContainer" containerID="e2032e149fb4598772a532e45cd059217a797b1b3b6bca41f6a69ad3f64a5950" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.145222 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/615b8e09-5a50-4af4-89dd-31fb6282baea-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.145253 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.145267 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c78fcba2-ffee-4ad3-bccd-085a90c81236-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.145279 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xdg4f\" (UniqueName: \"kubernetes.io/projected/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-kube-api-access-xdg4f\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.149693 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" (UID: "d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.164300 4702 scope.go:117] "RemoveContainer" containerID="03247ff6e8c0d7878d3dd0d3e07c678dbcf64411ff7e81b07330c010096be7a7" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.182070 4702 scope.go:117] "RemoveContainer" containerID="fc910157359e7b3e7050f7a6f381c53f4d647a501253657d603dd0ba134318bb" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.199105 4702 scope.go:117] "RemoveContainer" containerID="abba02ee45d72287f86337761e2f571c705f0c164992cb25e45c89119df2b4b4" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.215872 4702 scope.go:117] "RemoveContainer" containerID="28fbc32d80bfbfb17e1e87e43dafd168167bf031d77103973095c6c2b341ac9b" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.233101 4702 scope.go:117] "RemoveContainer" containerID="eed2f54a4bd92355f520f32986da18b75c3745175035e0a9db410bd3138bb2a3" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.246709 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.252793 4702 scope.go:117] "RemoveContainer" containerID="490617530ab7239bc926f99540292a6775c79a2aecaa7f04e8338a38d73a9890" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.727821 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-x6blk" event={"ID":"c78fcba2-ffee-4ad3-bccd-085a90c81236","Type":"ContainerDied","Data":"85c26e137c093486705de124609b0400b0b6f7f89021a6be1330fbd7616a1bc2"} Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.727913 4702 scope.go:117] "RemoveContainer" containerID="b5e5f66efdaedeb410a56f03c27c69f594fc16c7b4ab2ece72166007fea6b432" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.727835 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-x6blk" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.733657 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" event={"ID":"a6507463-185a-40db-9736-bfcc4f0928e9","Type":"ContainerStarted","Data":"a79cee05fddcded7f5f49b3b7b40a2bf7ad86630fea489981bc1d88364cdcfe8"} Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.734222 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.738146 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-zs89l" event={"ID":"d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4","Type":"ContainerDied","Data":"bc56638e36d64076c03f0bdffd8e503b7930775b90ff4d3c7e51fd19ff0cd385"} Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.738262 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-zs89l" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.747137 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-2dxxh" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.747155 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-2dxxh" event={"ID":"615b8e09-5a50-4af4-89dd-31fb6282baea","Type":"ContainerDied","Data":"1dc4fe0b73f92d512d1acff1eef1a8c52c10b55f452a012750814bace1b1bdf4"} Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.750658 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" event={"ID":"f587e69f-3aaf-403e-a060-bf4542e19ec8","Type":"ContainerStarted","Data":"8068edffc84f77fd6c20d48b26a0deaca6862cd1e8f99901ba162415c8e2cb48"} Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.750732 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.753963 4702 scope.go:117] "RemoveContainer" containerID="af614a0c2c218fe778a04ff296b9b57066d4922e2c12f2bdb0cc8f213f84809d" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.772013 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" podStartSLOduration=12.042570126 podStartE2EDuration="28.771991762s" podCreationTimestamp="2025-11-25 10:47:56 +0000 UTC" firstStartedPulling="2025-11-25 10:48:06.787346168 +0000 UTC m=+984.153941867" lastFinishedPulling="2025-11-25 10:48:23.516767814 +0000 UTC m=+1000.883363503" observedRunningTime="2025-11-25 10:48:24.757752486 +0000 UTC m=+1002.124348175" watchObservedRunningTime="2025-11-25 10:48:24.771991762 +0000 UTC m=+1002.138587461" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.789245 4702 scope.go:117] "RemoveContainer" containerID="98655131736db000215edc624e1852c8761f2289b8d63e356f869300f55ddf02" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.793309 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-x6blk"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.807252 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-x6blk"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.811605 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" podStartSLOduration=13.622108683 podStartE2EDuration="28.811590699s" podCreationTimestamp="2025-11-25 10:47:56 +0000 UTC" firstStartedPulling="2025-11-25 10:48:08.377133145 +0000 UTC m=+985.743728834" lastFinishedPulling="2025-11-25 10:48:23.566615161 +0000 UTC m=+1000.933210850" observedRunningTime="2025-11-25 10:48:24.808013574 +0000 UTC m=+1002.174609263" watchObservedRunningTime="2025-11-25 10:48:24.811590699 +0000 UTC m=+1002.178186388" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.832197 4702 scope.go:117] "RemoveContainer" containerID="e3b4933b9fae4cc93095d35563a121bb691a570b58e5ab711dd4b2e2d9e6118e" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.840321 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-zs89l"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.849280 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-zs89l"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.855231 4702 
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-2dxxh"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.860037 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-2dxxh"] Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.867115 4702 scope.go:117] "RemoveContainer" containerID="c858ee7e4750538a14289f8508294af65523561215da9da608360d5f817c5ab0" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.890065 4702 scope.go:117] "RemoveContainer" containerID="7a60e0f96c7e51154e76c9f2c41558246ce368ce1fad09704b0bcfed01d1aa88" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.910427 4702 scope.go:117] "RemoveContainer" containerID="7ffa06af9e254343a7e2b52dc964d06ce8cf159a6678e9676dcf87583f439450" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.932532 4702 scope.go:117] "RemoveContainer" containerID="911d439255c9b674d4d8075b0159f31aea5ccff302dd15cfda8f6e2d0e9bb31b" Nov 25 10:48:24 crc kubenswrapper[4702]: I1125 10:48:24.962686 4702 scope.go:117] "RemoveContainer" containerID="2d9a65d40c08092e2c4483e6a797f055140780b63a8959129157e9b1ffbc01ca" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.410114 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" path="/var/lib/kubelet/pods/05a3f3b1-e2d1-41b4-88c8-4b024440d5e9/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.410962 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" path="/var/lib/kubelet/pods/37a724f4-8b2c-4e9d-9502-85ac6fd9b9af/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.411700 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" path="/var/lib/kubelet/pods/615b8e09-5a50-4af4-89dd-31fb6282baea/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.412987 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" path="/var/lib/kubelet/pods/69f8b3df-cceb-485b-b985-7bdad0788aef/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.413735 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" path="/var/lib/kubelet/pods/9144c34a-7330-4d8b-aaa7-34747a3f4773/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.415060 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" path="/var/lib/kubelet/pods/c0738bd9-a74b-4aaa-a885-eae81ea2dc35/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.416012 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" path="/var/lib/kubelet/pods/c78fcba2-ffee-4ad3-bccd-085a90c81236/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.416617 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" path="/var/lib/kubelet/pods/d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.417566 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" path="/var/lib/kubelet/pods/de760b35-119f-4975-8eeb-76e8f9adb9f1/volumes" Nov 25 10:48:25 crc kubenswrapper[4702]: I1125 10:48:25.418234 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" 
podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" path="/var/lib/kubelet/pods/e57438f5-de09-4857-b5fc-e67b4c8c443d/volumes" Nov 25 10:48:26 crc kubenswrapper[4702]: I1125 10:48:26.777933 4702 generic.go:334] "Generic (PLEG): container finished" podID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerID="af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28" exitCode=1 Nov 25 10:48:26 crc kubenswrapper[4702]: I1125 10:48:26.778116 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerDied","Data":"af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28"} Nov 25 10:48:26 crc kubenswrapper[4702]: I1125 10:48:26.779128 4702 scope.go:117] "RemoveContainer" containerID="39543bdaf755be3b942ad694a42d4203bf9d4cfc1c4e666e12098877b51f0a08" Nov 25 10:48:26 crc kubenswrapper[4702]: I1125 10:48:26.779838 4702 scope.go:117] "RemoveContainer" containerID="af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28" Nov 25 10:48:26 crc kubenswrapper[4702]: E1125 10:48:26.780161 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:48:28 crc kubenswrapper[4702]: I1125 10:48:28.073507 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:48:28 crc kubenswrapper[4702]: I1125 10:48:28.073853 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:48:28 crc kubenswrapper[4702]: I1125 10:48:28.113457 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:48:28 crc kubenswrapper[4702]: I1125 10:48:28.847010 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:48:29 crc kubenswrapper[4702]: I1125 10:48:29.012378 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:48:29 crc kubenswrapper[4702]: I1125 10:48:29.054324 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:48:31 crc kubenswrapper[4702]: I1125 10:48:31.709282 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p8jqc"] Nov 25 10:48:31 crc kubenswrapper[4702]: I1125 10:48:31.709972 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-p8jqc" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="registry-server" containerID="cri-o://75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96" gracePeriod=2 Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.302913 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-knkf9"] Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.303132 4702 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-marketplace/redhat-operators-knkf9" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="registry-server" containerID="cri-o://0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df" gracePeriod=2 Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.697175 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8jqc" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.708692 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5k2q"] Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.708987 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-g5k2q" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerName="registry-server" containerID="cri-o://588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197" gracePeriod=2 Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.735547 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.831293 4702 generic.go:334] "Generic (PLEG): container finished" podID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerID="75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96" exitCode=0 Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.831351 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-p8jqc" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.831370 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jqc" event={"ID":"ab9c17d4-6fbd-4439-85ff-80db813e03a8","Type":"ContainerDied","Data":"75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96"} Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.831398 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-p8jqc" event={"ID":"ab9c17d4-6fbd-4439-85ff-80db813e03a8","Type":"ContainerDied","Data":"1451674505b3a58a4891e4f82ba8ef635f5a660ce737753ec406e05d032b5c9e"} Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.831418 4702 scope.go:117] "RemoveContainer" containerID="75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.842159 4702 generic.go:334] "Generic (PLEG): container finished" podID="e9dcc033-976b-440e-88ca-0c3b72212057" containerID="0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df" exitCode=0 Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.842208 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-knkf9" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.842217 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knkf9" event={"ID":"e9dcc033-976b-440e-88ca-0c3b72212057","Type":"ContainerDied","Data":"0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df"} Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.842277 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-knkf9" event={"ID":"e9dcc033-976b-440e-88ca-0c3b72212057","Type":"ContainerDied","Data":"e90fc578c85e73520f220c9fefe71a8a2a3d1325a9aa20590274246b1f247369"} Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.863743 4702 scope.go:117] "RemoveContainer" containerID="4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.866952 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-catalog-content\") pod \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.866997 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-catalog-content\") pod \"e9dcc033-976b-440e-88ca-0c3b72212057\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.867051 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-utilities\") pod \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.867131 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-76lr4\" (UniqueName: \"kubernetes.io/projected/ab9c17d4-6fbd-4439-85ff-80db813e03a8-kube-api-access-76lr4\") pod \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\" (UID: \"ab9c17d4-6fbd-4439-85ff-80db813e03a8\") " Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.867165 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-utilities\") pod \"e9dcc033-976b-440e-88ca-0c3b72212057\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.867243 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fk6ww\" (UniqueName: \"kubernetes.io/projected/e9dcc033-976b-440e-88ca-0c3b72212057-kube-api-access-fk6ww\") pod \"e9dcc033-976b-440e-88ca-0c3b72212057\" (UID: \"e9dcc033-976b-440e-88ca-0c3b72212057\") " Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.868236 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-utilities" (OuterVolumeSpecName: "utilities") pod "ab9c17d4-6fbd-4439-85ff-80db813e03a8" (UID: "ab9c17d4-6fbd-4439-85ff-80db813e03a8"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.868444 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-utilities" (OuterVolumeSpecName: "utilities") pod "e9dcc033-976b-440e-88ca-0c3b72212057" (UID: "e9dcc033-976b-440e-88ca-0c3b72212057"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.876383 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9dcc033-976b-440e-88ca-0c3b72212057-kube-api-access-fk6ww" (OuterVolumeSpecName: "kube-api-access-fk6ww") pod "e9dcc033-976b-440e-88ca-0c3b72212057" (UID: "e9dcc033-976b-440e-88ca-0c3b72212057"). InnerVolumeSpecName "kube-api-access-fk6ww". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.881197 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab9c17d4-6fbd-4439-85ff-80db813e03a8-kube-api-access-76lr4" (OuterVolumeSpecName: "kube-api-access-76lr4") pod "ab9c17d4-6fbd-4439-85ff-80db813e03a8" (UID: "ab9c17d4-6fbd-4439-85ff-80db813e03a8"). InnerVolumeSpecName "kube-api-access-76lr4". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.881566 4702 scope.go:117] "RemoveContainer" containerID="d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.906028 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kd7rs"] Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.906278 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-kd7rs" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerName="registry-server" containerID="cri-o://519963194f3114e18fcc237a4e4105d8409a40d83cce36f15a20e0b5fa5f402e" gracePeriod=2 Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.931466 4702 scope.go:117] "RemoveContainer" containerID="75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96" Nov 25 10:48:32 crc kubenswrapper[4702]: E1125 10:48:32.932421 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96\": container with ID starting with 75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96 not found: ID does not exist" containerID="75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.932466 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96"} err="failed to get container status \"75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96\": rpc error: code = NotFound desc = could not find container \"75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96\": container with ID starting with 75fcb7fb422d409a085f5941336eba2ccca5f9703eda9898251845e277db2f96 not found: ID does not exist" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.932492 4702 scope.go:117] "RemoveContainer" containerID="4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519" Nov 25 
10:48:32 crc kubenswrapper[4702]: E1125 10:48:32.933077 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519\": container with ID starting with 4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519 not found: ID does not exist" containerID="4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.933111 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519"} err="failed to get container status \"4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519\": rpc error: code = NotFound desc = could not find container \"4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519\": container with ID starting with 4828713f90e3b3e930a6b7027d1c409ad14120c721fd814ff37fb528f4df6519 not found: ID does not exist" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.933135 4702 scope.go:117] "RemoveContainer" containerID="d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174" Nov 25 10:48:32 crc kubenswrapper[4702]: E1125 10:48:32.933472 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174\": container with ID starting with d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174 not found: ID does not exist" containerID="d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.933531 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174"} err="failed to get container status \"d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174\": rpc error: code = NotFound desc = could not find container \"d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174\": container with ID starting with d7fdaa7317a897ff8c3a5c9a4451e62a02a6270de7c6aeef0f5a05885e886174 not found: ID does not exist" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.933558 4702 scope.go:117] "RemoveContainer" containerID="0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.969239 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.969281 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-76lr4\" (UniqueName: \"kubernetes.io/projected/ab9c17d4-6fbd-4439-85ff-80db813e03a8-kube-api-access-76lr4\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.969294 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.969303 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fk6ww\" (UniqueName: \"kubernetes.io/projected/e9dcc033-976b-440e-88ca-0c3b72212057-kube-api-access-fk6ww\") on node \"crc\" DevicePath \"\"" Nov 25 
10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.973012 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e9dcc033-976b-440e-88ca-0c3b72212057" (UID: "e9dcc033-976b-440e-88ca-0c3b72212057"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.977162 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ab9c17d4-6fbd-4439-85ff-80db813e03a8" (UID: "ab9c17d4-6fbd-4439-85ff-80db813e03a8"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.981938 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.982887 4702 scope.go:117] "RemoveContainer" containerID="af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28" Nov 25 10:48:32 crc kubenswrapper[4702]: E1125 10:48:32.983332 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:48:32 crc kubenswrapper[4702]: I1125 10:48:32.984199 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.015193 4702 scope.go:117] "RemoveContainer" containerID="b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.050064 4702 scope.go:117] "RemoveContainer" containerID="8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.067115 4702 scope.go:117] "RemoveContainer" containerID="0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df" Nov 25 10:48:33 crc kubenswrapper[4702]: E1125 10:48:33.067642 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df\": container with ID starting with 0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df not found: ID does not exist" containerID="0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.067674 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df"} err="failed to get container status \"0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df\": rpc error: code = NotFound desc = could not find container \"0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df\": container with ID starting with 0859994e9bbb15a2dc21e8f3ffd5bd1944143fd07a302f8eee43ecee1c56b2df not found: 
ID does not exist" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.067702 4702 scope.go:117] "RemoveContainer" containerID="b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3" Nov 25 10:48:33 crc kubenswrapper[4702]: E1125 10:48:33.068491 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3\": container with ID starting with b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3 not found: ID does not exist" containerID="b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.068524 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3"} err="failed to get container status \"b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3\": rpc error: code = NotFound desc = could not find container \"b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3\": container with ID starting with b892d8076382c3ce0e63d4e4a4143eaac250708d453e060221fdac0e7e85bcc3 not found: ID does not exist" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.068543 4702 scope.go:117] "RemoveContainer" containerID="8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd" Nov 25 10:48:33 crc kubenswrapper[4702]: E1125 10:48:33.070183 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd\": container with ID starting with 8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd not found: ID does not exist" containerID="8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.070260 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd"} err="failed to get container status \"8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd\": rpc error: code = NotFound desc = could not find container \"8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd\": container with ID starting with 8ec351bfa6f741d62af171b367918d1bd9f38c66c3968db6147b5dd5bf5bf7bd not found: ID does not exist" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.070947 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ab9c17d4-6fbd-4439-85ff-80db813e03a8-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.071143 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e9dcc033-976b-440e-88ca-0c3b72212057-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.226875 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-knkf9"] Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.232879 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-knkf9"] Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.237592 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-p8jqc"] Nov 25 
10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.244059 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-p8jqc"] Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.306252 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggkwj"] Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.306499 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ggkwj" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="registry-server" containerID="cri-o://7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0" gracePeriod=2 Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.414524 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" path="/var/lib/kubelet/pods/ab9c17d4-6fbd-4439-85ff-80db813e03a8/volumes" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.415130 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" path="/var/lib/kubelet/pods/e9dcc033-976b-440e-88ca-0c3b72212057/volumes" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.508849 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9ps98"] Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.509154 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-9ps98" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="registry-server" containerID="cri-o://17eddc39edc726a885e63fe2a81872545ebe140e270e7648c90579fc6878c8d0" gracePeriod=2 Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.586601 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5k2q" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.784265 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-65kts\" (UniqueName: \"kubernetes.io/projected/243dc11a-4786-46eb-b000-2ccab5aeb028-kube-api-access-65kts\") pod \"243dc11a-4786-46eb-b000-2ccab5aeb028\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.784316 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-utilities\") pod \"243dc11a-4786-46eb-b000-2ccab5aeb028\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.784380 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-catalog-content\") pod \"243dc11a-4786-46eb-b000-2ccab5aeb028\" (UID: \"243dc11a-4786-46eb-b000-2ccab5aeb028\") " Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.786998 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-utilities" (OuterVolumeSpecName: "utilities") pod "243dc11a-4786-46eb-b000-2ccab5aeb028" (UID: "243dc11a-4786-46eb-b000-2ccab5aeb028"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.791043 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/243dc11a-4786-46eb-b000-2ccab5aeb028-kube-api-access-65kts" (OuterVolumeSpecName: "kube-api-access-65kts") pod "243dc11a-4786-46eb-b000-2ccab5aeb028" (UID: "243dc11a-4786-46eb-b000-2ccab5aeb028"). InnerVolumeSpecName "kube-api-access-65kts". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.791164 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.807941 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "243dc11a-4786-46eb-b000-2ccab5aeb028" (UID: "243dc11a-4786-46eb-b000-2ccab5aeb028"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.859746 4702 generic.go:334] "Generic (PLEG): container finished" podID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerID="588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197" exitCode=0 Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.859812 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-g5k2q" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.859846 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5k2q" event={"ID":"243dc11a-4786-46eb-b000-2ccab5aeb028","Type":"ContainerDied","Data":"588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197"} Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.859893 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-g5k2q" event={"ID":"243dc11a-4786-46eb-b000-2ccab5aeb028","Type":"ContainerDied","Data":"185076190e74405ac380afdd6f8d79230835b27e21365b09bfd2514803a27ed4"} Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.859938 4702 scope.go:117] "RemoveContainer" containerID="588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.864554 4702 generic.go:334] "Generic (PLEG): container finished" podID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerID="7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0" exitCode=0 Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.864595 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggkwj" event={"ID":"bf090575-6d44-4e0b-9522-cb864bb8169b","Type":"ContainerDied","Data":"7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0"} Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.864611 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ggkwj" event={"ID":"bf090575-6d44-4e0b-9522-cb864bb8169b","Type":"ContainerDied","Data":"5d260fe2c9e6288c64bb9221d20254a5a93203a7ec10ad5a9ab53a4df5b2ebcd"} Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.864674 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ggkwj" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.869974 4702 generic.go:334] "Generic (PLEG): container finished" podID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerID="519963194f3114e18fcc237a4e4105d8409a40d83cce36f15a20e0b5fa5f402e" exitCode=0 Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.870037 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kd7rs" event={"ID":"62348515-75a4-4328-beb2-9e7df5e23fc3","Type":"ContainerDied","Data":"519963194f3114e18fcc237a4e4105d8409a40d83cce36f15a20e0b5fa5f402e"} Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.872649 4702 generic.go:334] "Generic (PLEG): container finished" podID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerID="17eddc39edc726a885e63fe2a81872545ebe140e270e7648c90579fc6878c8d0" exitCode=0 Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.872675 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ps98" event={"ID":"99bc87a4-5a14-4179-9e8b-4a49298b6f78","Type":"ContainerDied","Data":"17eddc39edc726a885e63fe2a81872545ebe140e270e7648c90579fc6878c8d0"} Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.874134 4702 scope.go:117] "RemoveContainer" containerID="af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28" Nov 25 10:48:33 crc kubenswrapper[4702]: E1125 10:48:33.874468 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.892134 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-utilities\") pod \"bf090575-6d44-4e0b-9522-cb864bb8169b\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.892209 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2fxvj\" (UniqueName: \"kubernetes.io/projected/bf090575-6d44-4e0b-9522-cb864bb8169b-kube-api-access-2fxvj\") pod \"bf090575-6d44-4e0b-9522-cb864bb8169b\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.892242 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-catalog-content\") pod \"bf090575-6d44-4e0b-9522-cb864bb8169b\" (UID: \"bf090575-6d44-4e0b-9522-cb864bb8169b\") " Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.892737 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-65kts\" (UniqueName: \"kubernetes.io/projected/243dc11a-4786-46eb-b000-2ccab5aeb028-kube-api-access-65kts\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.892760 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 
10:48:33.892778 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/243dc11a-4786-46eb-b000-2ccab5aeb028-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.896485 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-utilities" (OuterVolumeSpecName: "utilities") pod "bf090575-6d44-4e0b-9522-cb864bb8169b" (UID: "bf090575-6d44-4e0b-9522-cb864bb8169b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.899444 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf090575-6d44-4e0b-9522-cb864bb8169b-kube-api-access-2fxvj" (OuterVolumeSpecName: "kube-api-access-2fxvj") pod "bf090575-6d44-4e0b-9522-cb864bb8169b" (UID: "bf090575-6d44-4e0b-9522-cb864bb8169b"). InnerVolumeSpecName "kube-api-access-2fxvj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.920029 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "bf090575-6d44-4e0b-9522-cb864bb8169b" (UID: "bf090575-6d44-4e0b-9522-cb864bb8169b"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.922626 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w9lw"] Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.922879 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7w9lw" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerName="registry-server" containerID="cri-o://bc01dd268d4cfd6f44de94fa63a3fac8de6a9743f855f1ae210d05d66f2841f2" gracePeriod=2 Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.966695 4702 scope.go:117] "RemoveContainer" containerID="204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.967358 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.978982 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.980418 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5k2q"] Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.984868 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-g5k2q"] Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.994055 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.994112 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2fxvj\" (UniqueName: \"kubernetes.io/projected/bf090575-6d44-4e0b-9522-cb864bb8169b-kube-api-access-2fxvj\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:33 crc kubenswrapper[4702]: I1125 10:48:33.994125 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/bf090575-6d44-4e0b-9522-cb864bb8169b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.016331 4702 scope.go:117] "RemoveContainer" containerID="4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.034718 4702 scope.go:117] "RemoveContainer" containerID="588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197" Nov 25 10:48:34 crc kubenswrapper[4702]: E1125 10:48:34.035411 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197\": container with ID starting with 588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197 not found: ID does not exist" containerID="588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.035457 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197"} err="failed to get container status \"588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197\": rpc error: code = NotFound desc = could not find container \"588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197\": container with ID starting with 588cae85b711311ea3858914af64cbe240e3cf36222e9063773fb00b9f664197 not found: ID does not exist" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.035484 4702 scope.go:117] "RemoveContainer" containerID="204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5" Nov 25 10:48:34 crc kubenswrapper[4702]: E1125 10:48:34.035951 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5\": container with ID starting with 204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5 not found: ID does not exist" containerID="204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.035973 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5"} err="failed to get container 
status \"204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5\": rpc error: code = NotFound desc = could not find container \"204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5\": container with ID starting with 204fea7a99722c018c3e31818bc9d066dbfeacc73f1de3394f13121a35007ec5 not found: ID does not exist" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.035988 4702 scope.go:117] "RemoveContainer" containerID="4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d" Nov 25 10:48:34 crc kubenswrapper[4702]: E1125 10:48:34.036297 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d\": container with ID starting with 4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d not found: ID does not exist" containerID="4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.036351 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d"} err="failed to get container status \"4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d\": rpc error: code = NotFound desc = could not find container \"4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d\": container with ID starting with 4369fc391d39e2309c38a0c47aa638439d742b8093e99ba0f525e900cc16498d not found: ID does not exist" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.036380 4702 scope.go:117] "RemoveContainer" containerID="7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.049606 4702 scope.go:117] "RemoveContainer" containerID="ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.065299 4702 scope.go:117] "RemoveContainer" containerID="5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.095229 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9sb4f\" (UniqueName: \"kubernetes.io/projected/62348515-75a4-4328-beb2-9e7df5e23fc3-kube-api-access-9sb4f\") pod \"62348515-75a4-4328-beb2-9e7df5e23fc3\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.095292 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-utilities\") pod \"62348515-75a4-4328-beb2-9e7df5e23fc3\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.095370 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-catalog-content\") pod \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.095416 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-utilities\") pod \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " Nov 25 10:48:34 crc 
kubenswrapper[4702]: I1125 10:48:34.095462 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-catalog-content\") pod \"62348515-75a4-4328-beb2-9e7df5e23fc3\" (UID: \"62348515-75a4-4328-beb2-9e7df5e23fc3\") " Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.095487 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9dz9n\" (UniqueName: \"kubernetes.io/projected/99bc87a4-5a14-4179-9e8b-4a49298b6f78-kube-api-access-9dz9n\") pod \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\" (UID: \"99bc87a4-5a14-4179-9e8b-4a49298b6f78\") " Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.096474 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-utilities" (OuterVolumeSpecName: "utilities") pod "99bc87a4-5a14-4179-9e8b-4a49298b6f78" (UID: "99bc87a4-5a14-4179-9e8b-4a49298b6f78"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.096776 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-utilities" (OuterVolumeSpecName: "utilities") pod "62348515-75a4-4328-beb2-9e7df5e23fc3" (UID: "62348515-75a4-4328-beb2-9e7df5e23fc3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.100260 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99bc87a4-5a14-4179-9e8b-4a49298b6f78-kube-api-access-9dz9n" (OuterVolumeSpecName: "kube-api-access-9dz9n") pod "99bc87a4-5a14-4179-9e8b-4a49298b6f78" (UID: "99bc87a4-5a14-4179-9e8b-4a49298b6f78"). InnerVolumeSpecName "kube-api-access-9dz9n". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.101609 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/62348515-75a4-4328-beb2-9e7df5e23fc3-kube-api-access-9sb4f" (OuterVolumeSpecName: "kube-api-access-9sb4f") pod "62348515-75a4-4328-beb2-9e7df5e23fc3" (UID: "62348515-75a4-4328-beb2-9e7df5e23fc3"). InnerVolumeSpecName "kube-api-access-9sb4f". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.106672 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gzp7g"] Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.106974 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-gzp7g" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerName="registry-server" containerID="cri-o://73d0421047e9319ed5185bfb7113f9a3677c7da744fcf150b2a0f9017f233213" gracePeriod=2 Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.146088 4702 scope.go:117] "RemoveContainer" containerID="7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0" Nov 25 10:48:34 crc kubenswrapper[4702]: E1125 10:48:34.148722 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0\": container with ID starting with 7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0 not found: ID does not exist" containerID="7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.148758 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0"} err="failed to get container status \"7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0\": rpc error: code = NotFound desc = could not find container \"7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0\": container with ID starting with 7ecae91df4077a74bc90341b39a1899f6ac4c0e95979c284f82e37b5d5fb9cb0 not found: ID does not exist" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.148780 4702 scope.go:117] "RemoveContainer" containerID="ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a" Nov 25 10:48:34 crc kubenswrapper[4702]: E1125 10:48:34.151229 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a\": container with ID starting with ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a not found: ID does not exist" containerID="ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.151282 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a"} err="failed to get container status \"ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a\": rpc error: code = NotFound desc = could not find container \"ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a\": container with ID starting with ec29f8661dfa0c8fc1037a0e9aa9ada9bbc89dea4d33f251db85c12455dca99a not found: ID does not exist" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.151315 4702 scope.go:117] "RemoveContainer" containerID="5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd" Nov 25 10:48:34 crc kubenswrapper[4702]: E1125 10:48:34.151633 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd\": container with ID starting with 
5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd not found: ID does not exist" containerID="5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.151664 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd"} err="failed to get container status \"5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd\": rpc error: code = NotFound desc = could not find container \"5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd\": container with ID starting with 5c28e98a436187bf887d811e98c5422487e952e2ca8ee1995132e4d9a4ec9ebd not found: ID does not exist" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.197743 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9sb4f\" (UniqueName: \"kubernetes.io/projected/62348515-75a4-4328-beb2-9e7df5e23fc3-kube-api-access-9sb4f\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.197815 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.197826 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.197836 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9dz9n\" (UniqueName: \"kubernetes.io/projected/99bc87a4-5a14-4179-9e8b-4a49298b6f78-kube-api-access-9dz9n\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.199198 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "62348515-75a4-4328-beb2-9e7df5e23fc3" (UID: "62348515-75a4-4328-beb2-9e7df5e23fc3"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.204627 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggkwj"] Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.209880 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ggkwj"] Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.212707 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99bc87a4-5a14-4179-9e8b-4a49298b6f78" (UID: "99bc87a4-5a14-4179-9e8b-4a49298b6f78"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.298781 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/62348515-75a4-4328-beb2-9e7df5e23fc3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.298826 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99bc87a4-5a14-4179-9e8b-4a49298b6f78-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.504916 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t6m9v"] Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.506049 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-t6m9v" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerName="registry-server" containerID="cri-o://180c73a750d26020495abd6139e9e036a1ecb8b9b101a83b25607c3f17a46d08" gracePeriod=2 Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.705653 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hlfr2"] Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.705960 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-hlfr2" podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="registry-server" containerID="cri-o://82351b6db28029ffcee108fe757f14e4156eca8b9d3a49945f316fda2ae2f586" gracePeriod=2 Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.895236 4702 generic.go:334] "Generic (PLEG): container finished" podID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerID="73d0421047e9319ed5185bfb7113f9a3677c7da744fcf150b2a0f9017f233213" exitCode=0 Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.895304 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzp7g" event={"ID":"2980b36d-ef86-443d-9c30-b38cdf91e95b","Type":"ContainerDied","Data":"73d0421047e9319ed5185bfb7113f9a3677c7da744fcf150b2a0f9017f233213"} Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.900004 4702 generic.go:334] "Generic (PLEG): container finished" podID="01585b85-8743-46df-bf57-28b9c7101515" containerID="82351b6db28029ffcee108fe757f14e4156eca8b9d3a49945f316fda2ae2f586" exitCode=0 Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.900074 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlfr2" event={"ID":"01585b85-8743-46df-bf57-28b9c7101515","Type":"ContainerDied","Data":"82351b6db28029ffcee108fe757f14e4156eca8b9d3a49945f316fda2ae2f586"} Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.902921 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-kd7rs" event={"ID":"62348515-75a4-4328-beb2-9e7df5e23fc3","Type":"ContainerDied","Data":"0e1c96d92b5c6d7ef18434af3f6b0259a2e960b276391c966c326d2a3d4a7fe3"} Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.902999 4702 scope.go:117] "RemoveContainer" containerID="519963194f3114e18fcc237a4e4105d8409a40d83cce36f15a20e0b5fa5f402e" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.902950 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-kd7rs" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.909685 4702 generic.go:334] "Generic (PLEG): container finished" podID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerID="180c73a750d26020495abd6139e9e036a1ecb8b9b101a83b25607c3f17a46d08" exitCode=0 Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.909754 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t6m9v" event={"ID":"2db4d43f-926d-4fba-84d8-e49c594c5026","Type":"ContainerDied","Data":"180c73a750d26020495abd6139e9e036a1ecb8b9b101a83b25607c3f17a46d08"} Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.914255 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-9ps98" Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.914253 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-9ps98" event={"ID":"99bc87a4-5a14-4179-9e8b-4a49298b6f78","Type":"ContainerDied","Data":"929e3cad31166c5b8255ef2a76eebd03219030517cc5de59fd3c5dee687bef27"} Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.917992 4702 generic.go:334] "Generic (PLEG): container finished" podID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerID="bc01dd268d4cfd6f44de94fa63a3fac8de6a9743f855f1ae210d05d66f2841f2" exitCode=0 Nov 25 10:48:34 crc kubenswrapper[4702]: I1125 10:48:34.918032 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w9lw" event={"ID":"d93c64ec-5a95-4cef-a289-9fee39d7466f","Type":"ContainerDied","Data":"bc01dd268d4cfd6f44de94fa63a3fac8de6a9743f855f1ae210d05d66f2841f2"} Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.004199 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-9ps98"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.011883 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-9ps98"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.014018 4702 scope.go:117] "RemoveContainer" containerID="3af6bc66bf0ee16d5141b4559355be0f0a4dede7cc13d96bd3995fd1aee9ca27" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.024523 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-kd7rs"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.043873 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-kd7rs"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.051077 4702 scope.go:117] "RemoveContainer" containerID="5b1f3446f2b17df3c21c4f1e77021594765bafddeec205c46bfbfdb4950e2e78" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.112645 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gqzjk"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.112894 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-gqzjk" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerName="registry-server" containerID="cri-o://1f23c6ae047c3758b97a19b0c7d6b75e736fd177f6640f03a6b833c0d09a06af" gracePeriod=2 Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.115342 4702 scope.go:117] "RemoveContainer" containerID="17eddc39edc726a885e63fe2a81872545ebe140e270e7648c90579fc6878c8d0" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 
10:48:35.139727 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.146299 4702 scope.go:117] "RemoveContainer" containerID="142792dc155f780e17c5b4da679b3a01eee8a486ce94b58ae31734680c3db2c5" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.152124 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.166312 4702 scope.go:117] "RemoveContainer" containerID="5d8c9d358baadd16d57d954ffdf99d33db9b61c38057a429d8a23d0557ad4793" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.210058 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-catalog-content\") pod \"2980b36d-ef86-443d-9c30-b38cdf91e95b\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.210148 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tcg6r\" (UniqueName: \"kubernetes.io/projected/d93c64ec-5a95-4cef-a289-9fee39d7466f-kube-api-access-tcg6r\") pod \"d93c64ec-5a95-4cef-a289-9fee39d7466f\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.210222 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wzlrx\" (UniqueName: \"kubernetes.io/projected/2980b36d-ef86-443d-9c30-b38cdf91e95b-kube-api-access-wzlrx\") pod \"2980b36d-ef86-443d-9c30-b38cdf91e95b\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.210243 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-utilities\") pod \"2980b36d-ef86-443d-9c30-b38cdf91e95b\" (UID: \"2980b36d-ef86-443d-9c30-b38cdf91e95b\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.210325 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-catalog-content\") pod \"d93c64ec-5a95-4cef-a289-9fee39d7466f\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.210381 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-utilities\") pod \"d93c64ec-5a95-4cef-a289-9fee39d7466f\" (UID: \"d93c64ec-5a95-4cef-a289-9fee39d7466f\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.211680 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-utilities" (OuterVolumeSpecName: "utilities") pod "d93c64ec-5a95-4cef-a289-9fee39d7466f" (UID: "d93c64ec-5a95-4cef-a289-9fee39d7466f"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.211934 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-utilities" (OuterVolumeSpecName: "utilities") pod "2980b36d-ef86-443d-9c30-b38cdf91e95b" (UID: "2980b36d-ef86-443d-9c30-b38cdf91e95b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.215187 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d93c64ec-5a95-4cef-a289-9fee39d7466f-kube-api-access-tcg6r" (OuterVolumeSpecName: "kube-api-access-tcg6r") pod "d93c64ec-5a95-4cef-a289-9fee39d7466f" (UID: "d93c64ec-5a95-4cef-a289-9fee39d7466f"). InnerVolumeSpecName "kube-api-access-tcg6r". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.215578 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2980b36d-ef86-443d-9c30-b38cdf91e95b-kube-api-access-wzlrx" (OuterVolumeSpecName: "kube-api-access-wzlrx") pod "2980b36d-ef86-443d-9c30-b38cdf91e95b" (UID: "2980b36d-ef86-443d-9c30-b38cdf91e95b"). InnerVolumeSpecName "kube-api-access-wzlrx". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.230761 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d93c64ec-5a95-4cef-a289-9fee39d7466f" (UID: "d93c64ec-5a95-4cef-a289-9fee39d7466f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.311582 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.311620 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tcg6r\" (UniqueName: \"kubernetes.io/projected/d93c64ec-5a95-4cef-a289-9fee39d7466f-kube-api-access-tcg6r\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.311635 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wzlrx\" (UniqueName: \"kubernetes.io/projected/2980b36d-ef86-443d-9c30-b38cdf91e95b-kube-api-access-wzlrx\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.311646 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.311660 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d93c64ec-5a95-4cef-a289-9fee39d7466f-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.316014 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2980b36d-ef86-443d-9c30-b38cdf91e95b" (UID: "2980b36d-ef86-443d-9c30-b38cdf91e95b"). 
InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.316303 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5hzg9"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.316585 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5hzg9" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" containerName="registry-server" containerID="cri-o://d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279" gracePeriod=2 Nov 25 10:48:35 crc kubenswrapper[4702]: E1125 10:48:35.396002 4702 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod215f2731_6dc6_465a_a076_7a08feb8e5b6.slice/crio-conmon-1f23c6ae047c3758b97a19b0c7d6b75e736fd177f6640f03a6b833c0d09a06af.scope\": RecentStats: unable to find data in memory cache]" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.412740 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2980b36d-ef86-443d-9c30-b38cdf91e95b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.414086 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" path="/var/lib/kubelet/pods/243dc11a-4786-46eb-b000-2ccab5aeb028/volumes" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.415497 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" path="/var/lib/kubelet/pods/62348515-75a4-4328-beb2-9e7df5e23fc3/volumes" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.416300 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" path="/var/lib/kubelet/pods/99bc87a4-5a14-4179-9e8b-4a49298b6f78/volumes" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.418013 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" path="/var/lib/kubelet/pods/bf090575-6d44-4e0b-9522-cb864bb8169b/volumes" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.497986 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.616664 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-utilities\") pod \"2db4d43f-926d-4fba-84d8-e49c594c5026\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.616742 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xwk4q\" (UniqueName: \"kubernetes.io/projected/2db4d43f-926d-4fba-84d8-e49c594c5026-kube-api-access-xwk4q\") pod \"2db4d43f-926d-4fba-84d8-e49c594c5026\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.616778 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-catalog-content\") pod \"2db4d43f-926d-4fba-84d8-e49c594c5026\" (UID: \"2db4d43f-926d-4fba-84d8-e49c594c5026\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.618225 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-utilities" (OuterVolumeSpecName: "utilities") pod "2db4d43f-926d-4fba-84d8-e49c594c5026" (UID: "2db4d43f-926d-4fba-84d8-e49c594c5026"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.627525 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2db4d43f-926d-4fba-84d8-e49c594c5026-kube-api-access-xwk4q" (OuterVolumeSpecName: "kube-api-access-xwk4q") pod "2db4d43f-926d-4fba-84d8-e49c594c5026" (UID: "2db4d43f-926d-4fba-84d8-e49c594c5026"). InnerVolumeSpecName "kube-api-access-xwk4q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.639299 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2db4d43f-926d-4fba-84d8-e49c594c5026" (UID: "2db4d43f-926d-4fba-84d8-e49c594c5026"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.657276 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-hlfr2" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.708083 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8rxw"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.708384 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-x8rxw" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerName="registry-server" containerID="cri-o://4eb8d937e6f1dcba11a8c30d56006efeb607742c4550b50c45d3e74c89e2aaae" gracePeriod=2 Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.718453 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.718491 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xwk4q\" (UniqueName: \"kubernetes.io/projected/2db4d43f-926d-4fba-84d8-e49c594c5026-kube-api-access-xwk4q\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.718505 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2db4d43f-926d-4fba-84d8-e49c594c5026-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.819370 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-utilities\") pod \"01585b85-8743-46df-bf57-28b9c7101515\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.819476 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-catalog-content\") pod \"01585b85-8743-46df-bf57-28b9c7101515\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.819611 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hs8hm\" (UniqueName: \"kubernetes.io/projected/01585b85-8743-46df-bf57-28b9c7101515-kube-api-access-hs8hm\") pod \"01585b85-8743-46df-bf57-28b9c7101515\" (UID: \"01585b85-8743-46df-bf57-28b9c7101515\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.822671 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-utilities" (OuterVolumeSpecName: "utilities") pod "01585b85-8743-46df-bf57-28b9c7101515" (UID: "01585b85-8743-46df-bf57-28b9c7101515"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.824450 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01585b85-8743-46df-bf57-28b9c7101515-kube-api-access-hs8hm" (OuterVolumeSpecName: "kube-api-access-hs8hm") pod "01585b85-8743-46df-bf57-28b9c7101515" (UID: "01585b85-8743-46df-bf57-28b9c7101515"). InnerVolumeSpecName "kube-api-access-hs8hm". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.849825 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5hzg9" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.913859 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnqkn"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.914558 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-tnqkn" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerName="registry-server" containerID="cri-o://bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343" gracePeriod=2 Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.923277 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ptsx9\" (UniqueName: \"kubernetes.io/projected/79c9c319-b87f-4dae-9744-03ef948bf068-kube-api-access-ptsx9\") pod \"79c9c319-b87f-4dae-9744-03ef948bf068\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.923342 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-utilities\") pod \"79c9c319-b87f-4dae-9744-03ef948bf068\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.923499 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-catalog-content\") pod \"79c9c319-b87f-4dae-9744-03ef948bf068\" (UID: \"79c9c319-b87f-4dae-9744-03ef948bf068\") " Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.923778 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hs8hm\" (UniqueName: \"kubernetes.io/projected/01585b85-8743-46df-bf57-28b9c7101515-kube-api-access-hs8hm\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.923800 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.928619 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-utilities" (OuterVolumeSpecName: "utilities") pod "79c9c319-b87f-4dae-9744-03ef948bf068" (UID: "79c9c319-b87f-4dae-9744-03ef948bf068"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.936190 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/79c9c319-b87f-4dae-9744-03ef948bf068-kube-api-access-ptsx9" (OuterVolumeSpecName: "kube-api-access-ptsx9") pod "79c9c319-b87f-4dae-9744-03ef948bf068" (UID: "79c9c319-b87f-4dae-9744-03ef948bf068"). InnerVolumeSpecName "kube-api-access-ptsx9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.949082 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "01585b85-8743-46df-bf57-28b9c7101515" (UID: "01585b85-8743-46df-bf57-28b9c7101515"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.949453 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-hlfr2" event={"ID":"01585b85-8743-46df-bf57-28b9c7101515","Type":"ContainerDied","Data":"2052cb42258f34325d97cc8217f93ce703e02ae44cf5e0faec400db1b7719209"} Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.949500 4702 scope.go:117] "RemoveContainer" containerID="82351b6db28029ffcee108fe757f14e4156eca8b9d3a49945f316fda2ae2f586" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.949617 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-hlfr2" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.953529 4702 generic.go:334] "Generic (PLEG): container finished" podID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerID="1f23c6ae047c3758b97a19b0c7d6b75e736fd177f6640f03a6b833c0d09a06af" exitCode=0 Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.953578 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gqzjk" event={"ID":"215f2731-6dc6-465a-a076-7a08feb8e5b6","Type":"ContainerDied","Data":"1f23c6ae047c3758b97a19b0c7d6b75e736fd177f6640f03a6b833c0d09a06af"} Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.957297 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7w9lw" event={"ID":"d93c64ec-5a95-4cef-a289-9fee39d7466f","Type":"ContainerDied","Data":"6dedcd705d28953e8e9e4e511da93379804c74772b18e382b69b2cf0fb9b2558"} Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.957399 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7w9lw" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.959437 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-t6m9v" event={"ID":"2db4d43f-926d-4fba-84d8-e49c594c5026","Type":"ContainerDied","Data":"842edda2ff09462696822edf78465b5a163d6af146ee4c0c896577e9223ec735"} Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.959533 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-t6m9v" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.968531 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-gzp7g" event={"ID":"2980b36d-ef86-443d-9c30-b38cdf91e95b","Type":"ContainerDied","Data":"917483e3bb236d6587f4b0e50db9efda2f253cfe8730a3cd2e3d482f944640bc"} Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.968733 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-gzp7g" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.975951 4702 scope.go:117] "RemoveContainer" containerID="89f2d6b06896e0358771ba2495015c0710d38a6168a7fb156a516fb2c5796c2c" Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.990745 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w9lw"] Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.992744 4702 generic.go:334] "Generic (PLEG): container finished" podID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerID="4eb8d937e6f1dcba11a8c30d56006efeb607742c4550b50c45d3e74c89e2aaae" exitCode=0 Nov 25 10:48:35 crc kubenswrapper[4702]: I1125 10:48:35.992822 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8rxw" event={"ID":"e64ea1d5-d260-4331-bc5d-800fd8248ff7","Type":"ContainerDied","Data":"4eb8d937e6f1dcba11a8c30d56006efeb607742c4550b50c45d3e74c89e2aaae"} Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.001344 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7w9lw"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.012609 4702 generic.go:334] "Generic (PLEG): container finished" podID="79c9c319-b87f-4dae-9744-03ef948bf068" containerID="d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279" exitCode=0 Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.012934 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hzg9" event={"ID":"79c9c319-b87f-4dae-9744-03ef948bf068","Type":"ContainerDied","Data":"d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279"} Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.013038 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5hzg9" event={"ID":"79c9c319-b87f-4dae-9744-03ef948bf068","Type":"ContainerDied","Data":"b7e84d29ad605cc26f7c6c5ddc5468a7ab33f47f6c49dd8c320c56dcc0501601"} Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.013209 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5hzg9" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.025642 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ptsx9\" (UniqueName: \"kubernetes.io/projected/79c9c319-b87f-4dae-9744-03ef948bf068-kube-api-access-ptsx9\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.025677 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.025686 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/01585b85-8743-46df-bf57-28b9c7101515-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.025999 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-t6m9v"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.026454 4702 scope.go:117] "RemoveContainer" containerID="2182f112f372e03b18f3d486238265ec161dc6dec68a99f6a5ab4ef0ecf5dca2" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.031229 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-t6m9v"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.040781 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-gzp7g"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.054868 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-gzp7g"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.060721 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-hlfr2"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.064794 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-hlfr2"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.083709 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "79c9c319-b87f-4dae-9744-03ef948bf068" (UID: "79c9c319-b87f-4dae-9744-03ef948bf068"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.128663 4702 scope.go:117] "RemoveContainer" containerID="bc01dd268d4cfd6f44de94fa63a3fac8de6a9743f855f1ae210d05d66f2841f2" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.129437 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/79c9c319-b87f-4dae-9744-03ef948bf068-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.137030 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.155490 4702 scope.go:117] "RemoveContainer" containerID="037b3c1c6f2324880250d8e04d17f81303340b48494297365008ae09108c3bc0" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.274181 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8rxw" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.280168 4702 scope.go:117] "RemoveContainer" containerID="82122dd77526fef9035b0c8d136e2b837e960f410d5b72dd94297a6249b6bb55" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.305879 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxsrw"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.306207 4702 scope.go:117] "RemoveContainer" containerID="180c73a750d26020495abd6139e9e036a1ecb8b9b101a83b25607c3f17a46d08" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.306206 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-jxsrw" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerName="registry-server" containerID="cri-o://783d6bd458aec5722b89c608e56097944471206201e4df1cd031fbbda7dcfca3" gracePeriod=2 Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.330407 4702 scope.go:117] "RemoveContainer" containerID="46d2e1826050e681166045d108c5aa17d6f5ae06fd6e1a3dbf4c70ef0ac7e174" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.357803 4702 scope.go:117] "RemoveContainer" containerID="c046361a896970752f03bd584865bcd21aa43b87e29a639b6ba236de0da97b67" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.360162 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5hzg9"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.364094 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5hzg9"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.373440 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ss7gc\" (UniqueName: \"kubernetes.io/projected/215f2731-6dc6-465a-a076-7a08feb8e5b6-kube-api-access-ss7gc\") pod \"215f2731-6dc6-465a-a076-7a08feb8e5b6\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.373549 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c2n8k\" (UniqueName: \"kubernetes.io/projected/e64ea1d5-d260-4331-bc5d-800fd8248ff7-kube-api-access-c2n8k\") pod \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.374507 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-catalog-content\") pod \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.374551 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-utilities\") pod \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\" (UID: \"e64ea1d5-d260-4331-bc5d-800fd8248ff7\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.374579 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-utilities\") pod \"215f2731-6dc6-465a-a076-7a08feb8e5b6\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.374640 4702 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-catalog-content\") pod \"215f2731-6dc6-465a-a076-7a08feb8e5b6\" (UID: \"215f2731-6dc6-465a-a076-7a08feb8e5b6\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.377592 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-utilities" (OuterVolumeSpecName: "utilities") pod "e64ea1d5-d260-4331-bc5d-800fd8248ff7" (UID: "e64ea1d5-d260-4331-bc5d-800fd8248ff7"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.378277 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/215f2731-6dc6-465a-a076-7a08feb8e5b6-kube-api-access-ss7gc" (OuterVolumeSpecName: "kube-api-access-ss7gc") pod "215f2731-6dc6-465a-a076-7a08feb8e5b6" (UID: "215f2731-6dc6-465a-a076-7a08feb8e5b6"). InnerVolumeSpecName "kube-api-access-ss7gc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.378733 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e64ea1d5-d260-4331-bc5d-800fd8248ff7-kube-api-access-c2n8k" (OuterVolumeSpecName: "kube-api-access-c2n8k") pod "e64ea1d5-d260-4331-bc5d-800fd8248ff7" (UID: "e64ea1d5-d260-4331-bc5d-800fd8248ff7"). InnerVolumeSpecName "kube-api-access-c2n8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.378855 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-utilities" (OuterVolumeSpecName: "utilities") pod "215f2731-6dc6-465a-a076-7a08feb8e5b6" (UID: "215f2731-6dc6-465a-a076-7a08feb8e5b6"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.380267 4702 scope.go:117] "RemoveContainer" containerID="73d0421047e9319ed5185bfb7113f9a3677c7da744fcf150b2a0f9017f233213" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.396259 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e64ea1d5-d260-4331-bc5d-800fd8248ff7" (UID: "e64ea1d5-d260-4331-bc5d-800fd8248ff7"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.401981 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "215f2731-6dc6-465a-a076-7a08feb8e5b6" (UID: "215f2731-6dc6-465a-a076-7a08feb8e5b6"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.408475 4702 scope.go:117] "RemoveContainer" containerID="a4534a8d21211eed9be3946742c09b58c094f230f2780333ba75f9d7ab30b45f" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.431222 4702 scope.go:117] "RemoveContainer" containerID="bf8c5072c415fd49b509745f326edc235cec479d70a76e668101010ed8d05023" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.448030 4702 scope.go:117] "RemoveContainer" containerID="d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.464574 4702 scope.go:117] "RemoveContainer" containerID="d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.475466 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.475507 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e64ea1d5-d260-4331-bc5d-800fd8248ff7-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.475518 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.475528 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/215f2731-6dc6-465a-a076-7a08feb8e5b6-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.475545 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ss7gc\" (UniqueName: \"kubernetes.io/projected/215f2731-6dc6-465a-a076-7a08feb8e5b6-kube-api-access-ss7gc\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.475558 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c2n8k\" (UniqueName: \"kubernetes.io/projected/e64ea1d5-d260-4331-bc5d-800fd8248ff7-kube-api-access-c2n8k\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.475464 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.486646 4702 scope.go:117] "RemoveContainer" containerID="e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.508622 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wrrrc"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.508933 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-wrrrc" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="registry-server" containerID="cri-o://6f606f4f27c6104c8d96f6a3a79c56cecd43939d839a7d854ae41a1538940b0e" gracePeriod=2 Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.525138 4702 scope.go:117] "RemoveContainer" containerID="d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279" Nov 25 10:48:36 crc kubenswrapper[4702]: E1125 10:48:36.525575 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279\": container with ID starting with d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279 not found: ID does not exist" containerID="d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.525609 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279"} err="failed to get container status \"d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279\": rpc error: code = NotFound desc = could not find container \"d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279\": container with ID starting with d9b22e68b4e9cb7b9706316cb97cb53b911881be9d077a8950f0c494a62d9279 not found: ID does not exist" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.525635 4702 scope.go:117] "RemoveContainer" containerID="d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531" Nov 25 10:48:36 crc kubenswrapper[4702]: E1125 10:48:36.526193 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531\": container with ID starting with d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531 not found: ID does not exist" containerID="d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.526217 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531"} err="failed to get container status \"d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531\": rpc error: code = NotFound desc = could not find container \"d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531\": container with ID starting with d63d3a220b101897a402fb14553715e3a4fb5cec4b05d593a03c81a3773a3531 not found: ID does not exist" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.526238 4702 scope.go:117] "RemoveContainer" containerID="e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7" Nov 25 10:48:36 crc kubenswrapper[4702]: E1125 10:48:36.528250 4702 log.go:32] "ContainerStatus from runtime 
service failed" err="rpc error: code = NotFound desc = could not find container \"e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7\": container with ID starting with e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7 not found: ID does not exist" containerID="e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.528296 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7"} err="failed to get container status \"e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7\": rpc error: code = NotFound desc = could not find container \"e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7\": container with ID starting with e28945c43e2a786a6d667b55e287ec7a43aa12b067774e9c846949f7448b92e7 not found: ID does not exist" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.576234 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mls7d\" (UniqueName: \"kubernetes.io/projected/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-kube-api-access-mls7d\") pod \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.576367 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-utilities\") pod \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.576447 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-catalog-content\") pod \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\" (UID: \"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b\") " Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.577571 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-utilities" (OuterVolumeSpecName: "utilities") pod "1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" (UID: "1a938c8e-fc7b-4c65-94e9-e656aea9bf9b"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.582599 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-kube-api-access-mls7d" (OuterVolumeSpecName: "kube-api-access-mls7d") pod "1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" (UID: "1a938c8e-fc7b-4c65-94e9-e656aea9bf9b"). InnerVolumeSpecName "kube-api-access-mls7d". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.663542 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" (UID: "1a938c8e-fc7b-4c65-94e9-e656aea9bf9b"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.678635 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.678672 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.678683 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mls7d\" (UniqueName: \"kubernetes.io/projected/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b-kube-api-access-mls7d\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.835784 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-5c9db6d78-h6mmv" Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.911256 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzfg8"] Nov 25 10:48:36 crc kubenswrapper[4702]: I1125 10:48:36.911534 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wzfg8" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerName="registry-server" containerID="cri-o://9e580642f7e0e972101e0bd568cff0139ad368010c869c8f6dd8f70b5cf2e7d0" gracePeriod=2 Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.039891 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-gqzjk" event={"ID":"215f2731-6dc6-465a-a076-7a08feb8e5b6","Type":"ContainerDied","Data":"4db64d0388c36c243d9a80e82dbeb7ec0cc99cb3c9dbd2a619370d5f89a02695"} Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.040374 4702 scope.go:117] "RemoveContainer" containerID="1f23c6ae047c3758b97a19b0c7d6b75e736fd177f6640f03a6b833c0d09a06af" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.040172 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-gqzjk" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.047680 4702 generic.go:334] "Generic (PLEG): container finished" podID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerID="bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343" exitCode=0 Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.047756 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnqkn" event={"ID":"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b","Type":"ContainerDied","Data":"bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343"} Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.047779 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-tnqkn" event={"ID":"1a938c8e-fc7b-4c65-94e9-e656aea9bf9b","Type":"ContainerDied","Data":"88fbaa4787ab3cd6bd9fbd964e31c941c5a428bd9d6377d08c5191c39b9d8d1d"} Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.047864 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-tnqkn" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.057487 4702 generic.go:334] "Generic (PLEG): container finished" podID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerID="783d6bd458aec5722b89c608e56097944471206201e4df1cd031fbbda7dcfca3" exitCode=0 Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.057565 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxsrw" event={"ID":"a2a179ea-7517-4ae8-adb3-15ddc3b759fa","Type":"ContainerDied","Data":"783d6bd458aec5722b89c608e56097944471206201e4df1cd031fbbda7dcfca3"} Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.064074 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-x8rxw" event={"ID":"e64ea1d5-d260-4331-bc5d-800fd8248ff7","Type":"ContainerDied","Data":"4d009bfe5ec394c8ffe6f1299352b541c461d952882348b0f2f7f9c6e7b9d015"} Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.064170 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-x8rxw" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.073016 4702 generic.go:334] "Generic (PLEG): container finished" podID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerID="9e580642f7e0e972101e0bd568cff0139ad368010c869c8f6dd8f70b5cf2e7d0" exitCode=0 Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.073082 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzfg8" event={"ID":"d3c5667b-d960-49fb-b84c-cc17236f96f3","Type":"ContainerDied","Data":"9e580642f7e0e972101e0bd568cff0139ad368010c869c8f6dd8f70b5cf2e7d0"} Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.083540 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-tnqkn"] Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.086105 4702 generic.go:334] "Generic (PLEG): container finished" podID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerID="6f606f4f27c6104c8d96f6a3a79c56cecd43939d839a7d854ae41a1538940b0e" exitCode=0 Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.086190 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrrc" event={"ID":"57ac8391-54bc-457b-a6fe-8f4e761f53d1","Type":"ContainerDied","Data":"6f606f4f27c6104c8d96f6a3a79c56cecd43939d839a7d854ae41a1538940b0e"} Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.087503 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-tnqkn"] Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.097120 4702 scope.go:117] "RemoveContainer" containerID="6f9fd10c0b4adf0cf04ecdad8312bbf5cab2e31f03b353d339872eaa787d3a8b" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.110792 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8rxw"] Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.115085 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-x8rxw"] Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.125238 4702 scope.go:117] "RemoveContainer" containerID="eb7e9f68f7df7e15ec0fc3ab662c2a27e280714158a6e351bb3ae16dcbed1d1a" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.146600 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-gqzjk"] Nov 25 10:48:37 crc 
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.167261 4702 scope.go:117] "RemoveContainer" containerID="bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.198617 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxsrw"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.201000 4702 scope.go:117] "RemoveContainer" containerID="428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.242138 4702 scope.go:117] "RemoveContainer" containerID="ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.258940 4702 scope.go:117] "RemoveContainer" containerID="bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343"
Nov 25 10:48:37 crc kubenswrapper[4702]: E1125 10:48:37.259380 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343\": container with ID starting with bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343 not found: ID does not exist" containerID="bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.259420 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343"} err="failed to get container status \"bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343\": rpc error: code = NotFound desc = could not find container \"bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343\": container with ID starting with bc05a0f4a091b5a0daae27fba7b67c3d209e4227685bd62c2fa7f1041f129343 not found: ID does not exist"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.259448 4702 scope.go:117] "RemoveContainer" containerID="428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb"
Nov 25 10:48:37 crc kubenswrapper[4702]: E1125 10:48:37.259688 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb\": container with ID starting with 428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb not found: ID does not exist" containerID="428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.259715 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb"} err="failed to get container status \"428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb\": rpc error: code = NotFound desc = could not find container \"428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb\": container with ID starting with 428b85097972d4004f7306f926f6ec65db1eaa7ec488c032eb180e6789861edb not found: ID does not exist"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.259734 4702 scope.go:117] "RemoveContainer" containerID="ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94"
Nov 25 10:48:37 crc kubenswrapper[4702]: E1125 10:48:37.260018 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94\": container with ID starting with ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94 not found: ID does not exist" containerID="ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.260085 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94"} err="failed to get container status \"ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94\": rpc error: code = NotFound desc = could not find container \"ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94\": container with ID starting with ef614cb97448475dade9f043dd8dfee746d752c9e17aea628c19cdf212c3ee94 not found: ID does not exist"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.260135 4702 scope.go:117] "RemoveContainer" containerID="4eb8d937e6f1dcba11a8c30d56006efeb607742c4550b50c45d3e74c89e2aaae"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.277191 4702 scope.go:117] "RemoveContainer" containerID="f3925226d9a8916e34a32cb423b6c9e27090073e2c1cbd6fe75098656536c3f3"
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.289042 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-utilities\") pod \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") "
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.289112 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-klcpn\" (UniqueName: \"kubernetes.io/projected/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-kube-api-access-klcpn\") pod \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") "
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.289236 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-catalog-content\") pod \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\" (UID: \"a2a179ea-7517-4ae8-adb3-15ddc3b759fa\") "
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.289914 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-utilities" (OuterVolumeSpecName: "utilities") pod "a2a179ea-7517-4ae8-adb3-15ddc3b759fa" (UID: "a2a179ea-7517-4ae8-adb3-15ddc3b759fa"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.290135 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-utilities\") on node \"crc\" DevicePath \"\""
Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.292924 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-kube-api-access-klcpn" (OuterVolumeSpecName: "kube-api-access-klcpn") pod "a2a179ea-7517-4ae8-adb3-15ddc3b759fa" (UID: "a2a179ea-7517-4ae8-adb3-15ddc3b759fa"). InnerVolumeSpecName "kube-api-access-klcpn". PluginName "kubernetes.io/projected", VolumeGidValue ""
InnerVolumeSpecName "kube-api-access-klcpn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.294674 4702 scope.go:117] "RemoveContainer" containerID="8d4133e070099b322dd6280824c5cb2c572631b32a6396a1a9f886f5d09c184e" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.308587 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "a2a179ea-7517-4ae8-adb3-15ddc3b759fa" (UID: "a2a179ea-7517-4ae8-adb3-15ddc3b759fa"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.391894 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.391958 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-klcpn\" (UniqueName: \"kubernetes.io/projected/a2a179ea-7517-4ae8-adb3-15ddc3b759fa-kube-api-access-klcpn\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.414207 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01585b85-8743-46df-bf57-28b9c7101515" path="/var/lib/kubelet/pods/01585b85-8743-46df-bf57-28b9c7101515/volumes" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.414797 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" path="/var/lib/kubelet/pods/1a938c8e-fc7b-4c65-94e9-e656aea9bf9b/volumes" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.415505 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" path="/var/lib/kubelet/pods/215f2731-6dc6-465a-a076-7a08feb8e5b6/volumes" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.416776 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" path="/var/lib/kubelet/pods/2980b36d-ef86-443d-9c30-b38cdf91e95b/volumes" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.417513 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" path="/var/lib/kubelet/pods/2db4d43f-926d-4fba-84d8-e49c594c5026/volumes" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.418771 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" path="/var/lib/kubelet/pods/79c9c319-b87f-4dae-9744-03ef948bf068/volumes" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.419477 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" path="/var/lib/kubelet/pods/d93c64ec-5a95-4cef-a289-9fee39d7466f/volumes" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.420159 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" path="/var/lib/kubelet/pods/e64ea1d5-d260-4331-bc5d-800fd8248ff7/volumes" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.504972 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffp8k"] Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.505262 4702 kuberuntime_container.go:808] 
"Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ffp8k" podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerName="registry-server" containerID="cri-o://5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be" gracePeriod=2 Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.580211 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.594344 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-utilities\") pod \"d3c5667b-d960-49fb-b84c-cc17236f96f3\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.594468 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-catalog-content\") pod \"d3c5667b-d960-49fb-b84c-cc17236f96f3\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.594515 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pwmvk\" (UniqueName: \"kubernetes.io/projected/d3c5667b-d960-49fb-b84c-cc17236f96f3-kube-api-access-pwmvk\") pod \"d3c5667b-d960-49fb-b84c-cc17236f96f3\" (UID: \"d3c5667b-d960-49fb-b84c-cc17236f96f3\") " Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.595152 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-utilities" (OuterVolumeSpecName: "utilities") pod "d3c5667b-d960-49fb-b84c-cc17236f96f3" (UID: "d3c5667b-d960-49fb-b84c-cc17236f96f3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.607918 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d3c5667b-d960-49fb-b84c-cc17236f96f3-kube-api-access-pwmvk" (OuterVolumeSpecName: "kube-api-access-pwmvk") pod "d3c5667b-d960-49fb-b84c-cc17236f96f3" (UID: "d3c5667b-d960-49fb-b84c-cc17236f96f3"). InnerVolumeSpecName "kube-api-access-pwmvk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.612714 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.619268 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "d3c5667b-d960-49fb-b84c-cc17236f96f3" (UID: "d3c5667b-d960-49fb-b84c-cc17236f96f3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.695365 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tdss8\" (UniqueName: \"kubernetes.io/projected/57ac8391-54bc-457b-a6fe-8f4e761f53d1-kube-api-access-tdss8\") pod \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.695552 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-catalog-content\") pod \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.695593 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-utilities\") pod \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\" (UID: \"57ac8391-54bc-457b-a6fe-8f4e761f53d1\") " Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.695816 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pwmvk\" (UniqueName: \"kubernetes.io/projected/d3c5667b-d960-49fb-b84c-cc17236f96f3-kube-api-access-pwmvk\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.695834 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.695845 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/d3c5667b-d960-49fb-b84c-cc17236f96f3-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.696716 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-utilities" (OuterVolumeSpecName: "utilities") pod "57ac8391-54bc-457b-a6fe-8f4e761f53d1" (UID: "57ac8391-54bc-457b-a6fe-8f4e761f53d1"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.700206 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57ac8391-54bc-457b-a6fe-8f4e761f53d1-kube-api-access-tdss8" (OuterVolumeSpecName: "kube-api-access-tdss8") pod "57ac8391-54bc-457b-a6fe-8f4e761f53d1" (UID: "57ac8391-54bc-457b-a6fe-8f4e761f53d1"). InnerVolumeSpecName "kube-api-access-tdss8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.789497 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57ac8391-54bc-457b-a6fe-8f4e761f53d1" (UID: "57ac8391-54bc-457b-a6fe-8f4e761f53d1"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.797197 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tdss8\" (UniqueName: \"kubernetes.io/projected/57ac8391-54bc-457b-a6fe-8f4e761f53d1-kube-api-access-tdss8\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.797249 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.797264 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57ac8391-54bc-457b-a6fe-8f4e761f53d1-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:37 crc kubenswrapper[4702]: I1125 10:48:37.941985 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffp8k" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.001436 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-catalog-content\") pod \"73a2fc01-b4e5-413b-ba71-d37d5853d135\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.001538 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-utilities\") pod \"73a2fc01-b4e5-413b-ba71-d37d5853d135\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.001584 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fsmdk\" (UniqueName: \"kubernetes.io/projected/73a2fc01-b4e5-413b-ba71-d37d5853d135-kube-api-access-fsmdk\") pod \"73a2fc01-b4e5-413b-ba71-d37d5853d135\" (UID: \"73a2fc01-b4e5-413b-ba71-d37d5853d135\") " Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.002559 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-utilities" (OuterVolumeSpecName: "utilities") pod "73a2fc01-b4e5-413b-ba71-d37d5853d135" (UID: "73a2fc01-b4e5-413b-ba71-d37d5853d135"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.005555 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/73a2fc01-b4e5-413b-ba71-d37d5853d135-kube-api-access-fsmdk" (OuterVolumeSpecName: "kube-api-access-fsmdk") pod "73a2fc01-b4e5-413b-ba71-d37d5853d135" (UID: "73a2fc01-b4e5-413b-ba71-d37d5853d135"). InnerVolumeSpecName "kube-api-access-fsmdk". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.029554 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "73a2fc01-b4e5-413b-ba71-d37d5853d135" (UID: "73a2fc01-b4e5-413b-ba71-d37d5853d135"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.096965 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-jxsrw" event={"ID":"a2a179ea-7517-4ae8-adb3-15ddc3b759fa","Type":"ContainerDied","Data":"3c76bcde9c4a20d3ecf2f5869a68323ef31fe53655a5207579db184a9d31df7f"} Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.097023 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-jxsrw" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.097035 4702 scope.go:117] "RemoveContainer" containerID="783d6bd458aec5722b89c608e56097944471206201e4df1cd031fbbda7dcfca3" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.103093 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.103128 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/73a2fc01-b4e5-413b-ba71-d37d5853d135-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.103138 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fsmdk\" (UniqueName: \"kubernetes.io/projected/73a2fc01-b4e5-413b-ba71-d37d5853d135-kube-api-access-fsmdk\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.103397 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-wrrrc" event={"ID":"57ac8391-54bc-457b-a6fe-8f4e761f53d1","Type":"ContainerDied","Data":"c78f10548d460922242d5cc9986603562aa3bf5add9a772a059a8009cbdd4596"} Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.103516 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-wrrrc" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.108582 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-scblb"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.109000 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-scblb" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerName="registry-server" containerID="cri-o://1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7" gracePeriod=2 Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.110813 4702 generic.go:334] "Generic (PLEG): container finished" podID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerID="5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be" exitCode=0 Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.110934 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffp8k" event={"ID":"73a2fc01-b4e5-413b-ba71-d37d5853d135","Type":"ContainerDied","Data":"5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be"} Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.110961 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffp8k" event={"ID":"73a2fc01-b4e5-413b-ba71-d37d5853d135","Type":"ContainerDied","Data":"dddd42a4e444fe8d3e272b24af6b08fc6027b2df3c7eef34dc3af79b8ca08cd2"} Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.111024 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffp8k" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.114761 4702 scope.go:117] "RemoveContainer" containerID="2211c78e4f70b91f2424a7cb45761db3e2b0827b0e41dbbe17450ae991589392" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.117115 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wzfg8" event={"ID":"d3c5667b-d960-49fb-b84c-cc17236f96f3","Type":"ContainerDied","Data":"a0790dc1bf7f5ccfc28c0df08698bc13c3813e4bc6590fc11ead99c4a85ac576"} Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.117225 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wzfg8" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.126519 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxsrw"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.132323 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-jxsrw"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.170863 4702 scope.go:117] "RemoveContainer" containerID="8799a28dbc318bae877852a246807291e4169ad4407afbaedc9dcf3f546eda27" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.173106 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-wrrrc"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.181555 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-wrrrc"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.194014 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffp8k"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.203052 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffp8k"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.208069 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzfg8"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.210973 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-wzfg8"] Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.259569 4702 scope.go:117] "RemoveContainer" containerID="6f606f4f27c6104c8d96f6a3a79c56cecd43939d839a7d854ae41a1538940b0e" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.276027 4702 scope.go:117] "RemoveContainer" containerID="f114c1169e82ab1054c255f8010724a1df98d58cb5ec6af5dfd4fba539246375" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.292027 4702 scope.go:117] "RemoveContainer" containerID="e158bb1f510a144c4ea5bd46b430711bea5419ff4d7a1a99ee18ae5090af79f7" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.307269 4702 scope.go:117] "RemoveContainer" containerID="5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.321555 4702 scope.go:117] "RemoveContainer" containerID="8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.339023 4702 scope.go:117] "RemoveContainer" containerID="a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.358154 4702 scope.go:117] "RemoveContainer" containerID="5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be" Nov 25 10:48:38 crc kubenswrapper[4702]: E1125 10:48:38.358937 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be\": container with ID starting with 5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be not found: ID does not exist" containerID="5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.358995 4702 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be"} err="failed to get container status \"5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be\": rpc error: code = NotFound desc = could not find container \"5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be\": container with ID starting with 5086a2d7bdfbb04b4b2de988cb3bfa0e4b08820242b890e987d3220b0cf484be not found: ID does not exist" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.359036 4702 scope.go:117] "RemoveContainer" containerID="8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3" Nov 25 10:48:38 crc kubenswrapper[4702]: E1125 10:48:38.359371 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3\": container with ID starting with 8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3 not found: ID does not exist" containerID="8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.359394 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3"} err="failed to get container status \"8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3\": rpc error: code = NotFound desc = could not find container \"8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3\": container with ID starting with 8785c9e75a43c8f5f94fa465bee511b311128a8f400e9b67ea00c32ff4e048c3 not found: ID does not exist" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.359408 4702 scope.go:117] "RemoveContainer" containerID="a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5" Nov 25 10:48:38 crc kubenswrapper[4702]: E1125 10:48:38.360468 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5\": container with ID starting with a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5 not found: ID does not exist" containerID="a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.360511 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5"} err="failed to get container status \"a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5\": rpc error: code = NotFound desc = could not find container \"a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5\": container with ID starting with a8bd926934521c48a1e782f3e77bad6d9d75957a267e71b3ac7848874d081de5 not found: ID does not exist" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.360528 4702 scope.go:117] "RemoveContainer" containerID="9e580642f7e0e972101e0bd568cff0139ad368010c869c8f6dd8f70b5cf2e7d0" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.376127 4702 scope.go:117] "RemoveContainer" containerID="48c0a46061ed4552d4299319479d17c121c2124efa5c49730c68bcefc3c3d155" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.399053 4702 scope.go:117] "RemoveContainer" containerID="15a02248c7db9a98d1211202b96246d97b09aafaeee81f36f03320c259aeefcf" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.584186 4702 util.go:48] 
"No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scblb" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.608378 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-catalog-content\") pod \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.608447 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-utilities\") pod \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.608517 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vnxsc\" (UniqueName: \"kubernetes.io/projected/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-kube-api-access-vnxsc\") pod \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\" (UID: \"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9\") " Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.612613 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-kube-api-access-vnxsc" (OuterVolumeSpecName: "kube-api-access-vnxsc") pod "55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" (UID: "55dc663c-8dca-4d62-a20d-ffb82fdd5ee9"). InnerVolumeSpecName "kube-api-access-vnxsc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.617590 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-utilities" (OuterVolumeSpecName: "utilities") pod "55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" (UID: "55dc663c-8dca-4d62-a20d-ffb82fdd5ee9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.630605 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" (UID: "55dc663c-8dca-4d62-a20d-ffb82fdd5ee9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.710594 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.710650 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:38 crc kubenswrapper[4702]: I1125 10:48:38.710665 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vnxsc\" (UniqueName: \"kubernetes.io/projected/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9-kube-api-access-vnxsc\") on node \"crc\" DevicePath \"\"" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.127628 4702 generic.go:334] "Generic (PLEG): container finished" podID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerID="1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7" exitCode=0 Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.127707 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-scblb" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.127715 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scblb" event={"ID":"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9","Type":"ContainerDied","Data":"1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7"} Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.127825 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-scblb" event={"ID":"55dc663c-8dca-4d62-a20d-ffb82fdd5ee9","Type":"ContainerDied","Data":"9926b7bd4e16787fea4c2e6c0a16c1743d0459e3d514a9b5d85fc234010af70f"} Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.127846 4702 scope.go:117] "RemoveContainer" containerID="1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.142835 4702 scope.go:117] "RemoveContainer" containerID="719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.154887 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-scblb"] Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.163421 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-scblb"] Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.163577 4702 scope.go:117] "RemoveContainer" containerID="64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.184933 4702 scope.go:117] "RemoveContainer" containerID="1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7" Nov 25 10:48:39 crc kubenswrapper[4702]: E1125 10:48:39.188521 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7\": container with ID starting with 1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7 not found: ID does not exist" containerID="1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.188595 4702 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7"} err="failed to get container status \"1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7\": rpc error: code = NotFound desc = could not find container \"1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7\": container with ID starting with 1928f5120f6198d745cf27acd9e5e4430df6b2d705fd505e4d29ab032f76e9b7 not found: ID does not exist" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.188638 4702 scope.go:117] "RemoveContainer" containerID="719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f" Nov 25 10:48:39 crc kubenswrapper[4702]: E1125 10:48:39.189547 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f\": container with ID starting with 719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f not found: ID does not exist" containerID="719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.189678 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f"} err="failed to get container status \"719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f\": rpc error: code = NotFound desc = could not find container \"719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f\": container with ID starting with 719f9bdfdd7172b45882e7461903b3e43eb36bea36e5109630a14a1b4be56c1f not found: ID does not exist" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.189705 4702 scope.go:117] "RemoveContainer" containerID="64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98" Nov 25 10:48:39 crc kubenswrapper[4702]: E1125 10:48:39.191207 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98\": container with ID starting with 64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98 not found: ID does not exist" containerID="64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.191259 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98"} err="failed to get container status \"64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98\": rpc error: code = NotFound desc = could not find container \"64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98\": container with ID starting with 64dd605f2c1b06f14505cc7a30c91b7e230cf70698dbee09ecfaee2831340f98 not found: ID does not exist" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.409173 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" path="/var/lib/kubelet/pods/55dc663c-8dca-4d62-a20d-ffb82fdd5ee9/volumes" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.410115 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" path="/var/lib/kubelet/pods/57ac8391-54bc-457b-a6fe-8f4e761f53d1/volumes" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 
10:48:39.411038 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" path="/var/lib/kubelet/pods/73a2fc01-b4e5-413b-ba71-d37d5853d135/volumes" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.412525 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" path="/var/lib/kubelet/pods/a2a179ea-7517-4ae8-adb3-15ddc3b759fa/volumes" Nov 25 10:48:39 crc kubenswrapper[4702]: I1125 10:48:39.413408 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" path="/var/lib/kubelet/pods/d3c5667b-d960-49fb-b84c-cc17236f96f3/volumes" Nov 25 10:48:43 crc kubenswrapper[4702]: I1125 10:48:43.591026 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:48:43 crc kubenswrapper[4702]: I1125 10:48:43.591390 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:48:43 crc kubenswrapper[4702]: I1125 10:48:43.591442 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:48:43 crc kubenswrapper[4702]: I1125 10:48:43.592043 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"3dfc5653761b32b3ec56bc025998710cbd6ef0729baccffe43614c093e896dd9"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 10:48:43 crc kubenswrapper[4702]: I1125 10:48:43.592094 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://3dfc5653761b32b3ec56bc025998710cbd6ef0729baccffe43614c093e896dd9" gracePeriod=600 Nov 25 10:48:44 crc kubenswrapper[4702]: I1125 10:48:44.161390 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="3dfc5653761b32b3ec56bc025998710cbd6ef0729baccffe43614c093e896dd9" exitCode=0 Nov 25 10:48:44 crc kubenswrapper[4702]: I1125 10:48:44.161469 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"3dfc5653761b32b3ec56bc025998710cbd6ef0729baccffe43614c093e896dd9"} Nov 25 10:48:44 crc kubenswrapper[4702]: I1125 10:48:44.161546 4702 scope.go:117] "RemoveContainer" containerID="7918e86b322b2cb7cfa46c8cc2dfa0c27b9015c392a35a3637cb12006c4d3205" Nov 25 10:48:45 crc kubenswrapper[4702]: I1125 10:48:45.168670 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" 
event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"c995a3a58802015484aaf059ef2d7a1f54e8b9c0222aaf2fd6574984d2674473"} Nov 25 10:48:45 crc kubenswrapper[4702]: I1125 10:48:45.402684 4702 scope.go:117] "RemoveContainer" containerID="af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28" Nov 25 10:48:45 crc kubenswrapper[4702]: E1125 10:48:45.402954 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:48:56 crc kubenswrapper[4702]: I1125 10:48:56.524417 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-d9d597dc8-q6ll4" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.321525 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"] Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322280 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322303 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322315 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322324 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322335 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322344 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322358 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322368 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322381 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322389 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322400 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322407 4702 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322418 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322426 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322436 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322443 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322456 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322464 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322472 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322479 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322491 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322499 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322514 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322522 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322530 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322538 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322552 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322560 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322572 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322579 4702 
state_mem.go:107] "Deleted CPUSet assignment" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322588 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322597 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322609 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322617 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322630 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322638 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322650 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322657 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322668 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322675 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322686 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322694 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322704 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322712 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322721 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322729 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322737 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerName="extract-content" Nov 25 10:48:57 crc 
kubenswrapper[4702]: I1125 10:48:57.322745 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322756 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322765 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322777 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322785 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322795 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322802 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322816 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322824 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322839 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322847 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322858 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322865 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322880 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322887 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322915 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322925 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322937 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" 
containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322947 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322960 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322968 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.322980 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.322988 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323002 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323011 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323020 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323028 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323038 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323046 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323055 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323062 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323073 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323081 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323092 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323101 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323113 4702 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323120 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323132 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323140 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323151 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323159 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323169 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323176 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323187 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323195 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323204 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323211 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323223 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323230 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323241 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323248 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323258 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323265 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323277 4702 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323285 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323297 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323305 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323313 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323321 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323330 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323338 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323348 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323356 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323367 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323375 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323383 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323390 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323401 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323409 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323417 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323425 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 
10:48:57.323435 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323442 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323451 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323458 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323469 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323475 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323484 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323491 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323501 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323509 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323519 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323526 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323537 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323544 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323553 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323559 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323569 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323576 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerName="extract-content" Nov 25 10:48:57 crc 
kubenswrapper[4702]: E1125 10:48:57.323583 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323590 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323602 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323609 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323622 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323629 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323641 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323649 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323660 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323667 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323677 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323684 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323696 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323702 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323714 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323720 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323728 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323735 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" 
containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323742 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323748 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323756 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323763 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323773 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323781 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323790 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323797 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323808 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323816 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323828 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323835 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323844 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323851 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323858 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323865 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323872 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323879 4702 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323887 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323894 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323928 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323937 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323947 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323955 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323966 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323973 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.323981 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.323989 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.324000 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324007 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerName="extract-content" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.324015 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324023 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.324032 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324039 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.324047 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324053 4702 state_mem.go:107] 
"Deleted CPUSet assignment" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.324064 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324072 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerName="extract-utilities" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324267 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="215f2731-6dc6-465a-a076-7a08feb8e5b6" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324284 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c78fcba2-ffee-4ad3-bccd-085a90c81236" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324292 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="99bc87a4-5a14-4179-9e8b-4a49298b6f78" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324305 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="55dc663c-8dca-4d62-a20d-ffb82fdd5ee9" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324317 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db4d43f-926d-4fba-84d8-e49c594c5026" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324325 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a2a179ea-7517-4ae8-adb3-15ddc3b759fa" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324335 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="37a724f4-8b2c-4e9d-9502-85ac6fd9b9af" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324343 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="79c9c319-b87f-4dae-9744-03ef948bf068" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324351 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="243dc11a-4786-46eb-b000-2ccab5aeb028" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324361 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="d93c64ec-5a95-4cef-a289-9fee39d7466f" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324370 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="2980b36d-ef86-443d-9c30-b38cdf91e95b" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324379 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="615b8e09-5a50-4af4-89dd-31fb6282baea" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324390 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="d3c5667b-d960-49fb-b84c-cc17236f96f3" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324398 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0738bd9-a74b-4aaa-a885-eae81ea2dc35" containerName="registry-server" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324406 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="01585b85-8743-46df-bf57-28b9c7101515" containerName="registry-server" 
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324418 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="d4c7f2d3-ab7f-410e-9af3-b59ff87c9bc4" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324427 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a183e71-4cd9-4ec1-8ec4-3eff9a0b7f62" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324438 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9dcc033-976b-440e-88ca-0c3b72212057" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324448 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="57ac8391-54bc-457b-a6fe-8f4e761f53d1" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324459 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="05a3f3b1-e2d1-41b4-88c8-4b024440d5e9" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324468 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab9c17d4-6fbd-4439-85ff-80db813e03a8" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324479 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="62348515-75a4-4328-beb2-9e7df5e23fc3" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324489 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="de760b35-119f-4975-8eeb-76e8f9adb9f1" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324498 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="9144c34a-7330-4d8b-aaa7-34747a3f4773" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324509 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="e57438f5-de09-4857-b5fc-e67b4c8c443d" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324516 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="73a2fc01-b4e5-413b-ba71-d37d5853d135" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324526 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="69f8b3df-cceb-485b-b985-7bdad0788aef" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324537 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a7287d27-2a96-42f3-b8c5-1ca79d5c422a" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324547 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="e64ea1d5-d260-4331-bc5d-800fd8248ff7" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324559 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="bf090575-6d44-4e0b-9522-cb864bb8169b" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324569 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a938c8e-fc7b-4c65-94e9-e656aea9bf9b" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.324576 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="86eb5301-b8dd-4784-81c9-56375cbe983d" containerName="registry-server"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.325111 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.326942 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-5xblf"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.327300 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.334793 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"]
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.344965 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eea53047-9f4a-400c-8db5-bcb0a8c08967-cert\") pod \"frr-k8s-webhook-server-6998585d5-z5pzw\" (UID: \"eea53047-9f4a-400c-8db5-bcb0a8c08967\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.345196 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tg2fc\" (UniqueName: \"kubernetes.io/projected/eea53047-9f4a-400c-8db5-bcb0a8c08967-kube-api-access-tg2fc\") pod \"frr-k8s-webhook-server-6998585d5-z5pzw\" (UID: \"eea53047-9f4a-400c-8db5-bcb0a8c08967\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.366629 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-rsjl6"]
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.369536 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.378366 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.378795 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.442035 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-m5tjs"]
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.443443 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-m5tjs"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447553 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r5c65\" (UniqueName: \"kubernetes.io/projected/776790d4-3b26-4355-b007-928895d8abda-kube-api-access-r5c65\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447604 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-frr-conf\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447652 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-metrics\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447707 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/776790d4-3b26-4355-b007-928895d8abda-metrics-certs\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447730 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tg2fc\" (UniqueName: \"kubernetes.io/projected/eea53047-9f4a-400c-8db5-bcb0a8c08967-kube-api-access-tg2fc\") pod \"frr-k8s-webhook-server-6998585d5-z5pzw\" (UID: \"eea53047-9f4a-400c-8db5-bcb0a8c08967\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447786 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/776790d4-3b26-4355-b007-928895d8abda-frr-startup\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447805 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eea53047-9f4a-400c-8db5-bcb0a8c08967-cert\") pod \"frr-k8s-webhook-server-6998585d5-z5pzw\" (UID: \"eea53047-9f4a-400c-8db5-bcb0a8c08967\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447821 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-reloader\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.447858 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-frr-sockets\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.448267 4702 secret.go:188] Couldn't get secret metallb-system/frr-k8s-webhook-server-cert: secret "frr-k8s-webhook-server-cert" not found
Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.448310 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/eea53047-9f4a-400c-8db5-bcb0a8c08967-cert podName:eea53047-9f4a-400c-8db5-bcb0a8c08967 nodeName:}" failed. No retries permitted until 2025-11-25 10:48:57.948294633 +0000 UTC m=+1035.314890322 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "cert" (UniqueName: "kubernetes.io/secret/eea53047-9f4a-400c-8db5-bcb0a8c08967-cert") pod "frr-k8s-webhook-server-6998585d5-z5pzw" (UID: "eea53047-9f4a-400c-8db5-bcb0a8c08967") : secret "frr-k8s-webhook-server-cert" not found
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.452229 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.452410 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.455113 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.458156 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6c7b4b5f48-2ck8z"]
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.459214 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-2ck8z"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.459821 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-72zx8"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.461545 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.473731 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-2ck8z"]
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.476076 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tg2fc\" (UniqueName: \"kubernetes.io/projected/eea53047-9f4a-400c-8db5-bcb0a8c08967-kube-api-access-tg2fc\") pod \"frr-k8s-webhook-server-6998585d5-z5pzw\" (UID: \"eea53047-9f4a-400c-8db5-bcb0a8c08967\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.549194 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m4vzh\" (UniqueName: \"kubernetes.io/projected/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-kube-api-access-m4vzh\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.549369 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/776790d4-3b26-4355-b007-928895d8abda-frr-startup\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.549424 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-reloader\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6"
(UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-reloader\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.549455 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.549484 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-frr-sockets\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.549503 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metrics-certs\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.549559 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r5c65\" (UniqueName: \"kubernetes.io/projected/776790d4-3b26-4355-b007-928895d8abda-kube-api-access-r5c65\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550007 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-frr-sockets\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550047 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-frr-conf\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550113 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metallb-excludel2\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550149 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2bkdd\" (UniqueName: \"kubernetes.io/projected/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-kube-api-access-2bkdd\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550272 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-metrics\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc 
kubenswrapper[4702]: I1125 10:48:57.550307 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-metrics-certs\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550352 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/776790d4-3b26-4355-b007-928895d8abda-metrics-certs\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550357 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-frr-conf\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550372 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-cert\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550585 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-metrics\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.550846 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/776790d4-3b26-4355-b007-928895d8abda-reloader\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.551194 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/776790d4-3b26-4355-b007-928895d8abda-frr-startup\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.569160 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r5c65\" (UniqueName: \"kubernetes.io/projected/776790d4-3b26-4355-b007-928895d8abda-kube-api-access-r5c65\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.577564 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/776790d4-3b26-4355-b007-928895d8abda-metrics-certs\") pod \"frr-k8s-rsjl6\" (UID: \"776790d4-3b26-4355-b007-928895d8abda\") " pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.651645 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metallb-excludel2\") pod \"speaker-m5tjs\" (UID: 
\"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.651697 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2bkdd\" (UniqueName: \"kubernetes.io/projected/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-kube-api-access-2bkdd\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.651755 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-metrics-certs\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.651785 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-cert\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.651823 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m4vzh\" (UniqueName: \"kubernetes.io/projected/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-kube-api-access-m4vzh\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.651870 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.651892 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metrics-certs\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.652063 4702 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.652121 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metrics-certs podName:92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf nodeName:}" failed. No retries permitted until 2025-11-25 10:48:58.152101957 +0000 UTC m=+1035.518697646 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metrics-certs") pod "speaker-m5tjs" (UID: "92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf") : secret "speaker-certs-secret" not found Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.652524 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metallb-excludel2\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.652536 4702 secret.go:188] Couldn't get secret metallb-system/controller-certs-secret: secret "controller-certs-secret" not found Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.652596 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-metrics-certs podName:973ff76e-6e14-4f6e-a0cb-fc3e5af9b694 nodeName:}" failed. No retries permitted until 2025-11-25 10:48:58.15258435 +0000 UTC m=+1035.519180039 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-metrics-certs") pod "controller-6c7b4b5f48-2ck8z" (UID: "973ff76e-6e14-4f6e-a0cb-fc3e5af9b694") : secret "controller-certs-secret" not found Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.652651 4702 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 25 10:48:57 crc kubenswrapper[4702]: E1125 10:48:57.652680 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist podName:92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf nodeName:}" failed. No retries permitted until 2025-11-25 10:48:58.152670382 +0000 UTC m=+1035.519266071 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist") pod "speaker-m5tjs" (UID: "92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf") : secret "metallb-memberlist" not found Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.654610 4702 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.665448 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-cert\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.675600 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m4vzh\" (UniqueName: \"kubernetes.io/projected/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-kube-api-access-m4vzh\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.679393 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2bkdd\" (UniqueName: \"kubernetes.io/projected/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-kube-api-access-2bkdd\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.684956 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.955066 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eea53047-9f4a-400c-8db5-bcb0a8c08967-cert\") pod \"frr-k8s-webhook-server-6998585d5-z5pzw\" (UID: \"eea53047-9f4a-400c-8db5-bcb0a8c08967\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw" Nov 25 10:48:57 crc kubenswrapper[4702]: I1125 10:48:57.959059 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/eea53047-9f4a-400c-8db5-bcb0a8c08967-cert\") pod \"frr-k8s-webhook-server-6998585d5-z5pzw\" (UID: \"eea53047-9f4a-400c-8db5-bcb0a8c08967\") " pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw" Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.158643 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-metrics-certs\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.158732 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.158763 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metrics-certs\") pod \"speaker-m5tjs\" (UID: 
\"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:58 crc kubenswrapper[4702]: E1125 10:48:58.158955 4702 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found Nov 25 10:48:58 crc kubenswrapper[4702]: E1125 10:48:58.159026 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist podName:92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf nodeName:}" failed. No retries permitted until 2025-11-25 10:48:59.159006088 +0000 UTC m=+1036.525601777 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist") pod "speaker-m5tjs" (UID: "92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf") : secret "metallb-memberlist" not found Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.162495 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-metrics-certs\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.162536 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/973ff76e-6e14-4f6e-a0cb-fc3e5af9b694-metrics-certs\") pod \"controller-6c7b4b5f48-2ck8z\" (UID: \"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694\") " pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.243181 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw" Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.403203 4702 scope.go:117] "RemoveContainer" containerID="af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28" Nov 25 10:48:58 crc kubenswrapper[4702]: E1125 10:48:58.404067 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"manager\" with CrashLoopBackOff: \"back-off 40s restarting failed container=manager pod=mariadb-operator-controller-manager-656f99cfb7-jldk8_openstack-operators(c7e80c7d-91e3-4953-bf91-d35441e38743)\"" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.417273 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.641242 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"] Nov 25 10:48:58 crc kubenswrapper[4702]: W1125 10:48:58.652772 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podeea53047_9f4a_400c_8db5_bcb0a8c08967.slice/crio-968c07f6acefb0ed3e8012c7001efe5dcb367380355d123a153f27042f9395ad WatchSource:0}: Error finding container 968c07f6acefb0ed3e8012c7001efe5dcb367380355d123a153f27042f9395ad: Status 404 returned error can't find the container with id 968c07f6acefb0ed3e8012c7001efe5dcb367380355d123a153f27042f9395ad Nov 25 10:48:58 crc kubenswrapper[4702]: I1125 10:48:58.653361 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6c7b4b5f48-2ck8z"] Nov 25 10:48:59 crc kubenswrapper[4702]: I1125 10:48:59.174250 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:59 crc kubenswrapper[4702]: I1125 10:48:59.181611 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf-memberlist\") pod \"speaker-m5tjs\" (UID: \"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf\") " pod="metallb-system/speaker-m5tjs" Nov 25 10:48:59 crc kubenswrapper[4702]: I1125 10:48:59.246169 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw" event={"ID":"eea53047-9f4a-400c-8db5-bcb0a8c08967","Type":"ContainerStarted","Data":"968c07f6acefb0ed3e8012c7001efe5dcb367380355d123a153f27042f9395ad"} Nov 25 10:48:59 crc kubenswrapper[4702]: I1125 10:48:59.247822 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerStarted","Data":"3cad66eace90aa527fe31b9b5dfd38a07d121aa4ed3d64818172a1db96679069"} Nov 25 10:48:59 crc kubenswrapper[4702]: I1125 10:48:59.249487 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-2ck8z" event={"ID":"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694","Type":"ContainerStarted","Data":"7fce2b7e7ec38684f595ae2a6b7cec53c13c4fad2bb48859f7daea607ae782a5"} Nov 25 10:48:59 crc kubenswrapper[4702]: I1125 10:48:59.249525 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-2ck8z" event={"ID":"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694","Type":"ContainerStarted","Data":"f459683f4f40bbadd1ae6267504ef06882471b1efc6e128862988ad7b449e565"} Nov 25 10:48:59 crc kubenswrapper[4702]: I1125 10:48:59.275164 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="metallb-system/speaker-m5tjs" Nov 25 10:48:59 crc kubenswrapper[4702]: W1125 10:48:59.297119 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod92bfef0f_1ea1_4d57_bd99_2f1b573d5ddf.slice/crio-fb215f4d3e2b0b179462e885f7b316111d43d2e9059b394829cfc953fb0077ee WatchSource:0}: Error finding container fb215f4d3e2b0b179462e885f7b316111d43d2e9059b394829cfc953fb0077ee: Status 404 returned error can't find the container with id fb215f4d3e2b0b179462e885f7b316111d43d2e9059b394829cfc953fb0077ee Nov 25 10:49:00 crc kubenswrapper[4702]: I1125 10:49:00.258571 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-m5tjs" event={"ID":"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf","Type":"ContainerStarted","Data":"d28918d4942bece2b8851d94cdc2fb90ebb1e98ca7984683e981f5785b100d2e"} Nov 25 10:49:00 crc kubenswrapper[4702]: I1125 10:49:00.258686 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-m5tjs" event={"ID":"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf","Type":"ContainerStarted","Data":"fb215f4d3e2b0b179462e885f7b316111d43d2e9059b394829cfc953fb0077ee"} Nov 25 10:49:02 crc kubenswrapper[4702]: I1125 10:49:02.274297 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-m5tjs" event={"ID":"92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf","Type":"ContainerStarted","Data":"aa40cb4cd78f378c43b29d05c4810d996e76edb34a354ea47783b9c04b495d79"} Nov 25 10:49:02 crc kubenswrapper[4702]: I1125 10:49:02.274814 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-m5tjs" Nov 25 10:49:02 crc kubenswrapper[4702]: I1125 10:49:02.276835 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6c7b4b5f48-2ck8z" event={"ID":"973ff76e-6e14-4f6e-a0cb-fc3e5af9b694","Type":"ContainerStarted","Data":"f064f5d8f3557a647511a2dacadb8ab68694c4d169f8b69cdfeea7e2bf972593"} Nov 25 10:49:02 crc kubenswrapper[4702]: I1125 10:49:02.278022 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:49:02 crc kubenswrapper[4702]: I1125 10:49:02.294124 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-m5tjs" podStartSLOduration=3.066995666 podStartE2EDuration="5.294106566s" podCreationTimestamp="2025-11-25 10:48:57 +0000 UTC" firstStartedPulling="2025-11-25 10:48:59.56288808 +0000 UTC m=+1036.929483769" lastFinishedPulling="2025-11-25 10:49:01.78999897 +0000 UTC m=+1039.156594669" observedRunningTime="2025-11-25 10:49:02.291850604 +0000 UTC m=+1039.658446303" watchObservedRunningTime="2025-11-25 10:49:02.294106566 +0000 UTC m=+1039.660702255" Nov 25 10:49:02 crc kubenswrapper[4702]: I1125 10:49:02.313840 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6c7b4b5f48-2ck8z" podStartSLOduration=2.364803482 podStartE2EDuration="5.313817627s" podCreationTimestamp="2025-11-25 10:48:57 +0000 UTC" firstStartedPulling="2025-11-25 10:48:58.828723529 +0000 UTC m=+1036.195319228" lastFinishedPulling="2025-11-25 10:49:01.777737684 +0000 UTC m=+1039.144333373" observedRunningTime="2025-11-25 10:49:02.307404621 +0000 UTC m=+1039.674000330" watchObservedRunningTime="2025-11-25 10:49:02.313817627 +0000 UTC m=+1039.680413346" Nov 25 10:49:06 crc kubenswrapper[4702]: I1125 10:49:06.307601 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw" event={"ID":"eea53047-9f4a-400c-8db5-bcb0a8c08967","Type":"ContainerStarted","Data":"01b7824704704633bbfbd6e88607642af3d80076a12e5b8f2d191eb77d0f2a88"} Nov 25 10:49:06 crc kubenswrapper[4702]: I1125 10:49:06.308745 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw" Nov 25 10:49:06 crc kubenswrapper[4702]: I1125 10:49:06.310949 4702 generic.go:334] "Generic (PLEG): container finished" podID="776790d4-3b26-4355-b007-928895d8abda" containerID="8a2ffe7a4919a274c5b5005eb69e0636e42c85a87c72b218dd64c32b5b02fd5a" exitCode=0 Nov 25 10:49:06 crc kubenswrapper[4702]: I1125 10:49:06.310987 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerDied","Data":"8a2ffe7a4919a274c5b5005eb69e0636e42c85a87c72b218dd64c32b5b02fd5a"} Nov 25 10:49:06 crc kubenswrapper[4702]: I1125 10:49:06.358889 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw" podStartSLOduration=1.976016382 podStartE2EDuration="9.358872446s" podCreationTimestamp="2025-11-25 10:48:57 +0000 UTC" firstStartedPulling="2025-11-25 10:48:58.655375301 +0000 UTC m=+1036.021970990" lastFinishedPulling="2025-11-25 10:49:06.038231365 +0000 UTC m=+1043.404827054" observedRunningTime="2025-11-25 10:49:06.331285789 +0000 UTC m=+1043.697881478" watchObservedRunningTime="2025-11-25 10:49:06.358872446 +0000 UTC m=+1043.725468135" Nov 25 10:49:07 crc kubenswrapper[4702]: I1125 10:49:07.318575 4702 generic.go:334] "Generic (PLEG): container finished" podID="776790d4-3b26-4355-b007-928895d8abda" containerID="94bcf0166708cc4b50a6f7d6b3559ef63fc514b82e544ea45ee45197a812d176" exitCode=0 Nov 25 10:49:07 crc kubenswrapper[4702]: I1125 10:49:07.318785 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerDied","Data":"94bcf0166708cc4b50a6f7d6b3559ef63fc514b82e544ea45ee45197a812d176"} Nov 25 10:49:08 crc kubenswrapper[4702]: I1125 10:49:08.326952 4702 generic.go:334] "Generic (PLEG): container finished" podID="776790d4-3b26-4355-b007-928895d8abda" containerID="31ee8474bff58c13727e5fc5b5f1626f173d81cc05cc643fc26e8cbccd807bb3" exitCode=0 Nov 25 10:49:08 crc kubenswrapper[4702]: I1125 10:49:08.327061 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerDied","Data":"31ee8474bff58c13727e5fc5b5f1626f173d81cc05cc643fc26e8cbccd807bb3"} Nov 25 10:49:08 crc kubenswrapper[4702]: I1125 10:49:08.422728 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6c7b4b5f48-2ck8z" Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.279546 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-m5tjs" Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.339874 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerStarted","Data":"de28faf6a728117b4f147279c0a9f7bfc9ad95281461c4a0b31ec9064bce89c1"} Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.339942 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" 
event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerStarted","Data":"a7652b64dad2caa42606424b2d9d59e7abc413ebd1769f35b6f5f68b49fefee9"} Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.339954 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerStarted","Data":"11e47f7ec76a96a8997799e076ea7d6bd78a4e600f30735a46246369b76e4e30"} Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.339964 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerStarted","Data":"9b77d642322e9d8370f51c7e92faa12ea31ea30032cec962c5c5797b723a83ba"} Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.339973 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerStarted","Data":"3fe72460311c084500a0d4cc54807143fc1822f94980ff9b6ad5b63528efe191"} Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.474911 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-4kxkd"] Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.475959 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-4kxkd" Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.482403 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-fzmzr" Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.486355 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-4kxkd"] Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.532001 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vfnhv\" (UniqueName: \"kubernetes.io/projected/a6277a43-9ecf-410f-bbed-5d7d90e49f09-kube-api-access-vfnhv\") pod \"infra-operator-index-4kxkd\" (UID: \"a6277a43-9ecf-410f-bbed-5d7d90e49f09\") " pod="openstack-operators/infra-operator-index-4kxkd" Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.633783 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vfnhv\" (UniqueName: \"kubernetes.io/projected/a6277a43-9ecf-410f-bbed-5d7d90e49f09-kube-api-access-vfnhv\") pod \"infra-operator-index-4kxkd\" (UID: \"a6277a43-9ecf-410f-bbed-5d7d90e49f09\") " pod="openstack-operators/infra-operator-index-4kxkd" Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.703609 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vfnhv\" (UniqueName: \"kubernetes.io/projected/a6277a43-9ecf-410f-bbed-5d7d90e49f09-kube-api-access-vfnhv\") pod \"infra-operator-index-4kxkd\" (UID: \"a6277a43-9ecf-410f-bbed-5d7d90e49f09\") " pod="openstack-operators/infra-operator-index-4kxkd" Nov 25 10:49:09 crc kubenswrapper[4702]: I1125 10:49:09.796647 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-4kxkd" Nov 25 10:49:10 crc kubenswrapper[4702]: I1125 10:49:10.006357 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-4kxkd"] Nov 25 10:49:10 crc kubenswrapper[4702]: I1125 10:49:10.348918 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-rsjl6" event={"ID":"776790d4-3b26-4355-b007-928895d8abda","Type":"ContainerStarted","Data":"ad42b121125367fc89c37e2d7f117f9c09c8a01b5cfc2d19d7593fd6fff7c310"} Nov 25 10:49:10 crc kubenswrapper[4702]: I1125 10:49:10.349755 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:49:10 crc kubenswrapper[4702]: I1125 10:49:10.351170 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4kxkd" event={"ID":"a6277a43-9ecf-410f-bbed-5d7d90e49f09","Type":"ContainerStarted","Data":"7fe5a4dbfdcad8c4b762955458dbadabd760339c89cf24f3690a53b653f95975"} Nov 25 10:49:10 crc kubenswrapper[4702]: I1125 10:49:10.397418 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-rsjl6" podStartSLOduration=5.606526564 podStartE2EDuration="13.397399647s" podCreationTimestamp="2025-11-25 10:48:57 +0000 UTC" firstStartedPulling="2025-11-25 10:48:58.265460149 +0000 UTC m=+1035.632055838" lastFinishedPulling="2025-11-25 10:49:06.056333232 +0000 UTC m=+1043.422928921" observedRunningTime="2025-11-25 10:49:10.393588312 +0000 UTC m=+1047.760184001" watchObservedRunningTime="2025-11-25 10:49:10.397399647 +0000 UTC m=+1047.763995336" Nov 25 10:49:12 crc kubenswrapper[4702]: I1125 10:49:12.402694 4702 scope.go:117] "RemoveContainer" containerID="af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28" Nov 25 10:49:12 crc kubenswrapper[4702]: I1125 10:49:12.456829 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-4kxkd"] Nov 25 10:49:12 crc kubenswrapper[4702]: I1125 10:49:12.687006 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:49:12 crc kubenswrapper[4702]: I1125 10:49:12.726577 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-rsjl6" Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.063040 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-pm8nx"] Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.063787 4702 util.go:30] "No sandbox for pod can be found. 
Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.074692 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-pm8nx"]
Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.174746 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22cz5\" (UniqueName: \"kubernetes.io/projected/0a31b7b4-f333-4334-8940-873e0a462d72-kube-api-access-22cz5\") pod \"infra-operator-index-pm8nx\" (UID: \"0a31b7b4-f333-4334-8940-873e0a462d72\") " pod="openstack-operators/infra-operator-index-pm8nx"
Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.281345 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22cz5\" (UniqueName: \"kubernetes.io/projected/0a31b7b4-f333-4334-8940-873e0a462d72-kube-api-access-22cz5\") pod \"infra-operator-index-pm8nx\" (UID: \"0a31b7b4-f333-4334-8940-873e0a462d72\") " pod="openstack-operators/infra-operator-index-pm8nx"
Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.300884 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22cz5\" (UniqueName: \"kubernetes.io/projected/0a31b7b4-f333-4334-8940-873e0a462d72-kube-api-access-22cz5\") pod \"infra-operator-index-pm8nx\" (UID: \"0a31b7b4-f333-4334-8940-873e0a462d72\") " pod="openstack-operators/infra-operator-index-pm8nx"
Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.369168 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerStarted","Data":"141dec66156e2f971af2e8c4ceda67d71beeee7771b69b099aed85f101a0ecb6"}
Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.369618 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"
Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.381092 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-pm8nx"
Nov 25 10:49:13 crc kubenswrapper[4702]: I1125 10:49:13.607307 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-pm8nx"]
Nov 25 10:49:14 crc kubenswrapper[4702]: W1125 10:49:14.178885 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0a31b7b4_f333_4334_8940_873e0a462d72.slice/crio-9ccc9c90b436ce3ea98d76279dec629ace478efc9baf788f952aabd18f7665ca WatchSource:0}: Error finding container 9ccc9c90b436ce3ea98d76279dec629ace478efc9baf788f952aabd18f7665ca: Status 404 returned error can't find the container with id 9ccc9c90b436ce3ea98d76279dec629ace478efc9baf788f952aabd18f7665ca
Nov 25 10:49:14 crc kubenswrapper[4702]: I1125 10:49:14.377722 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-pm8nx" event={"ID":"0a31b7b4-f333-4334-8940-873e0a462d72","Type":"ContainerStarted","Data":"9ccc9c90b436ce3ea98d76279dec629ace478efc9baf788f952aabd18f7665ca"}
Nov 25 10:49:15 crc kubenswrapper[4702]: I1125 10:49:15.385671 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4kxkd" event={"ID":"a6277a43-9ecf-410f-bbed-5d7d90e49f09","Type":"ContainerStarted","Data":"7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e"}
Nov 25 10:49:15 crc kubenswrapper[4702]: I1125 10:49:15.385795 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-4kxkd" podUID="a6277a43-9ecf-410f-bbed-5d7d90e49f09" containerName="registry-server" containerID="cri-o://7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e" gracePeriod=2
Nov 25 10:49:15 crc kubenswrapper[4702]: I1125 10:49:15.415466 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-4kxkd" podStartSLOduration=2.115903228 podStartE2EDuration="6.415444702s" podCreationTimestamp="2025-11-25 10:49:09 +0000 UTC" firstStartedPulling="2025-11-25 10:49:10.015058422 +0000 UTC m=+1047.381654111" lastFinishedPulling="2025-11-25 10:49:14.314599896 +0000 UTC m=+1051.681195585" observedRunningTime="2025-11-25 10:49:15.404675827 +0000 UTC m=+1052.771271526" watchObservedRunningTime="2025-11-25 10:49:15.415444702 +0000 UTC m=+1052.782040391"
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.251588 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-4kxkd"
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.324407 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vfnhv\" (UniqueName: \"kubernetes.io/projected/a6277a43-9ecf-410f-bbed-5d7d90e49f09-kube-api-access-vfnhv\") pod \"a6277a43-9ecf-410f-bbed-5d7d90e49f09\" (UID: \"a6277a43-9ecf-410f-bbed-5d7d90e49f09\") "
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.331164 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6277a43-9ecf-410f-bbed-5d7d90e49f09-kube-api-access-vfnhv" (OuterVolumeSpecName: "kube-api-access-vfnhv") pod "a6277a43-9ecf-410f-bbed-5d7d90e49f09" (UID: "a6277a43-9ecf-410f-bbed-5d7d90e49f09"). InnerVolumeSpecName "kube-api-access-vfnhv". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.399352 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-pm8nx" event={"ID":"0a31b7b4-f333-4334-8940-873e0a462d72","Type":"ContainerStarted","Data":"dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b"}
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.402971 4702 generic.go:334] "Generic (PLEG): container finished" podID="a6277a43-9ecf-410f-bbed-5d7d90e49f09" containerID="7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e" exitCode=0
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.403101 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-4kxkd"
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.403092 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4kxkd" event={"ID":"a6277a43-9ecf-410f-bbed-5d7d90e49f09","Type":"ContainerDied","Data":"7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e"}
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.403185 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-4kxkd" event={"ID":"a6277a43-9ecf-410f-bbed-5d7d90e49f09","Type":"ContainerDied","Data":"7fe5a4dbfdcad8c4b762955458dbadabd760339c89cf24f3690a53b653f95975"}
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.403213 4702 scope.go:117] "RemoveContainer" containerID="7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e"
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.426925 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-pm8nx" podStartSLOduration=2.359486037 podStartE2EDuration="3.426881065s" podCreationTimestamp="2025-11-25 10:49:13 +0000 UTC" firstStartedPulling="2025-11-25 10:49:14.312416496 +0000 UTC m=+1051.679012185" lastFinishedPulling="2025-11-25 10:49:15.379811524 +0000 UTC m=+1052.746407213" observedRunningTime="2025-11-25 10:49:16.426869744 +0000 UTC m=+1053.793465453" watchObservedRunningTime="2025-11-25 10:49:16.426881065 +0000 UTC m=+1053.793476754"
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.427112 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vfnhv\" (UniqueName: \"kubernetes.io/projected/a6277a43-9ecf-410f-bbed-5d7d90e49f09-kube-api-access-vfnhv\") on node \"crc\" DevicePath \"\""
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.436554 4702 scope.go:117] "RemoveContainer" containerID="7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e"
Nov 25 10:49:16 crc kubenswrapper[4702]: E1125 10:49:16.437161 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e\": container with ID starting with 7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e not found: ID does not exist" containerID="7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e"
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.437188 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e"} err="failed to get container status \"7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e\": rpc error: code = NotFound desc = could not find container \"7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e\": container with ID starting with 7f6831f95cc93645ae154183a1eb219ca57019fff5ad7f06c1ccc978be2f499e not found: ID does not exist"
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.446203 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-4kxkd"]
Nov 25 10:49:16 crc kubenswrapper[4702]: I1125 10:49:16.451161 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-4kxkd"]
Nov 25 10:49:17 crc kubenswrapper[4702]: I1125 10:49:17.409570 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a6277a43-9ecf-410f-bbed-5d7d90e49f09" path="/var/lib/kubelet/pods/a6277a43-9ecf-410f-bbed-5d7d90e49f09/volumes"
Nov 25 10:49:18 crc kubenswrapper[4702]: I1125 10:49:18.248917 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-6998585d5-z5pzw"
Nov 25 10:49:22 crc kubenswrapper[4702]: I1125 10:49:22.986389 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"
Nov 25 10:49:23 crc kubenswrapper[4702]: I1125 10:49:23.382243 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-pm8nx"
Nov 25 10:49:23 crc kubenswrapper[4702]: I1125 10:49:23.383321 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-pm8nx"
Nov 25 10:49:23 crc kubenswrapper[4702]: I1125 10:49:23.412836 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-pm8nx"
Nov 25 10:49:23 crc kubenswrapper[4702]: I1125 10:49:23.467980 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-pm8nx"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.691959 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"]
Nov 25 10:49:25 crc kubenswrapper[4702]: E1125 10:49:25.692213 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6277a43-9ecf-410f-bbed-5d7d90e49f09" containerName="registry-server"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.692227 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6277a43-9ecf-410f-bbed-5d7d90e49f09" containerName="registry-server"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.692320 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6277a43-9ecf-410f-bbed-5d7d90e49f09" containerName="registry-server"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.693017 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.696670 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-wkv7r"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.707173 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"]
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.750695 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.750803 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.750872 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vrqn9\" (UniqueName: \"kubernetes.io/projected/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-kube-api-access-vrqn9\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.852271 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.852335 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.852380 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vrqn9\" (UniqueName: \"kubernetes.io/projected/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-kube-api-access-vrqn9\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.852862 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-bundle\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.852942 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-util\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:25 crc kubenswrapper[4702]: I1125 10:49:25.886228 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vrqn9\" (UniqueName: \"kubernetes.io/projected/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-kube-api-access-vrqn9\") pod \"5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") " pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:26 crc kubenswrapper[4702]: I1125 10:49:26.011568 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:26 crc kubenswrapper[4702]: I1125 10:49:26.294889 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"]
Nov 25 10:49:26 crc kubenswrapper[4702]: I1125 10:49:26.465976 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql" event={"ID":"81a8be8b-a635-4cf1-a6a9-8b77e4b55426","Type":"ContainerStarted","Data":"a9526521377c399573c98f219235648c47958ad379c3d081dbec46d2d5d608b8"}
Nov 25 10:49:27 crc kubenswrapper[4702]: I1125 10:49:27.473026 4702 generic.go:334] "Generic (PLEG): container finished" podID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerID="53feb5236c141b53846c60a8d6997e271738abe1b58cb46391271de7086b5e8c" exitCode=0
Nov 25 10:49:27 crc kubenswrapper[4702]: I1125 10:49:27.473121 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql" event={"ID":"81a8be8b-a635-4cf1-a6a9-8b77e4b55426","Type":"ContainerDied","Data":"53feb5236c141b53846c60a8d6997e271738abe1b58cb46391271de7086b5e8c"}
Nov 25 10:49:27 crc kubenswrapper[4702]: I1125 10:49:27.689450 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-rsjl6"
Nov 25 10:49:28 crc kubenswrapper[4702]: I1125 10:49:28.482488 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql" event={"ID":"81a8be8b-a635-4cf1-a6a9-8b77e4b55426","Type":"ContainerDied","Data":"4405ae38d3adb909c25d5148edb64cf6bce1466016b42e53282f1b1d188b9ca8"}
Nov 25 10:49:28 crc kubenswrapper[4702]: I1125 10:49:28.482419 4702 generic.go:334] "Generic (PLEG): container finished" podID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerID="4405ae38d3adb909c25d5148edb64cf6bce1466016b42e53282f1b1d188b9ca8" exitCode=0
Nov 25 10:49:29 crc kubenswrapper[4702]: I1125 10:49:29.490568 4702 generic.go:334] "Generic (PLEG): container finished" podID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerID="ea8e88f760c62548da8284edc88970a6fb35142659b77aa01649850b0373e80f" exitCode=0
Nov 25 10:49:29 crc kubenswrapper[4702]: I1125 10:49:29.490643 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql" event={"ID":"81a8be8b-a635-4cf1-a6a9-8b77e4b55426","Type":"ContainerDied","Data":"ea8e88f760c62548da8284edc88970a6fb35142659b77aa01649850b0373e80f"}
Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.724684 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"
Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.814457 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-bundle\") pod \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") "
Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.814555 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vrqn9\" (UniqueName: \"kubernetes.io/projected/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-kube-api-access-vrqn9\") pod \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") "
Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.814692 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-util\") pod \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\" (UID: \"81a8be8b-a635-4cf1-a6a9-8b77e4b55426\") "
Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.815782 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-bundle" (OuterVolumeSpecName: "bundle") pod "81a8be8b-a635-4cf1-a6a9-8b77e4b55426" (UID: "81a8be8b-a635-4cf1-a6a9-8b77e4b55426"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.821505 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-kube-api-access-vrqn9" (OuterVolumeSpecName: "kube-api-access-vrqn9") pod "81a8be8b-a635-4cf1-a6a9-8b77e4b55426" (UID: "81a8be8b-a635-4cf1-a6a9-8b77e4b55426"). InnerVolumeSpecName "kube-api-access-vrqn9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.831726 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-util" (OuterVolumeSpecName: "util") pod "81a8be8b-a635-4cf1-a6a9-8b77e4b55426" (UID: "81a8be8b-a635-4cf1-a6a9-8b77e4b55426"). InnerVolumeSpecName "util".
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.915809 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vrqn9\" (UniqueName: \"kubernetes.io/projected/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-kube-api-access-vrqn9\") on node \"crc\" DevicePath \"\"" Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.915856 4702 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-util\") on node \"crc\" DevicePath \"\"" Nov 25 10:49:30 crc kubenswrapper[4702]: I1125 10:49:30.915871 4702 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/81a8be8b-a635-4cf1-a6a9-8b77e4b55426-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:49:31 crc kubenswrapper[4702]: I1125 10:49:31.518726 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql" event={"ID":"81a8be8b-a635-4cf1-a6a9-8b77e4b55426","Type":"ContainerDied","Data":"a9526521377c399573c98f219235648c47958ad379c3d081dbec46d2d5d608b8"} Nov 25 10:49:31 crc kubenswrapper[4702]: I1125 10:49:31.518770 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a9526521377c399573c98f219235648c47958ad379c3d081dbec46d2d5d608b8" Nov 25 10:49:31 crc kubenswrapper[4702]: I1125 10:49:31.518804 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.638358 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj"] Nov 25 10:49:39 crc kubenswrapper[4702]: E1125 10:49:39.640018 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerName="util" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.640093 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerName="util" Nov 25 10:49:39 crc kubenswrapper[4702]: E1125 10:49:39.640150 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerName="extract" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.640235 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerName="extract" Nov 25 10:49:39 crc kubenswrapper[4702]: E1125 10:49:39.640299 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerName="pull" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.640364 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerName="pull" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.640537 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" containerName="extract" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.641299 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.643867 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.644320 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-q266z" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.662838 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj"] Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.729191 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-apiservice-cert\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.729259 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hbpkr\" (UniqueName: \"kubernetes.io/projected/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-kube-api-access-hbpkr\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.729295 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-webhook-cert\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.830689 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-apiservice-cert\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.830752 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hbpkr\" (UniqueName: \"kubernetes.io/projected/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-kube-api-access-hbpkr\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.830784 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-webhook-cert\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.838954 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-apiservice-cert\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.842100 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-webhook-cert\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.847195 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hbpkr\" (UniqueName: \"kubernetes.io/projected/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-kube-api-access-hbpkr\") pod \"infra-operator-controller-manager-647cf9c56-wn9rj\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:39 crc kubenswrapper[4702]: I1125 10:49:39.967062 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:40 crc kubenswrapper[4702]: I1125 10:49:40.182273 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj"] Nov 25 10:49:40 crc kubenswrapper[4702]: I1125 10:49:40.572045 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" event={"ID":"a79a35ba-b1cc-4c3b-bf33-43ff1af46972","Type":"ContainerStarted","Data":"56333537dfe5e2df48c43392870b0dd22de86314f523847238b06b93f4756820"} Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.736714 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.738265 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.748954 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.752023 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"openshift-service-ca.crt" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.752077 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"galera-openstack-dockercfg-r7ljp" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.758682 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.759834 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.760504 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"kube-root-ca.crt" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.761492 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"openstack-scripts" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.766506 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"openstack-config-data" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.769738 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.772389 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.798497 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.842135 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.858737 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.858799 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78dea2f0-701b-4124-8def-a3c353705d62-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.858839 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-config-data-default\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.858874 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-kolla-config\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.858932 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-operator-scripts\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.858979 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6q7j\" (UniqueName: 
\"kubernetes.io/projected/78dea2f0-701b-4124-8def-a3c353705d62-kube-api-access-g6q7j\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959640 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fh45j\" (UniqueName: \"kubernetes.io/projected/a3a68041-1390-4922-81b3-ca65322db681-kube-api-access-fh45j\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959687 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-kolla-config\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959710 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-kolla-config\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959737 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959784 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78dea2f0-701b-4124-8def-a3c353705d62-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959806 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-operator-scripts\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959829 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-config-data-default\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959854 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-generated\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959885 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: 
\"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-kolla-config\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959948 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-default\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.959982 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a3a68041-1390-4922-81b3-ca65322db681-config-data-generated\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960007 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960032 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-operator-scripts\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960064 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-operator-scripts\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960071 4702 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") device mount path \"/mnt/openstack/pv06\"" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960092 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n2sjh\" (UniqueName: \"kubernetes.io/projected/f3415a28-7b55-4649-94fd-9b976e6919d7-kube-api-access-n2sjh\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960118 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960145 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-config-data-default\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960172 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6q7j\" (UniqueName: \"kubernetes.io/projected/78dea2f0-701b-4124-8def-a3c353705d62-kube-api-access-g6q7j\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.960833 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78dea2f0-701b-4124-8def-a3c353705d62-config-data-generated\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.961556 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-config-data-default\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.961970 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-operator-scripts\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.962092 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-kolla-config\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.982578 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6q7j\" (UniqueName: \"kubernetes.io/projected/78dea2f0-701b-4124-8def-a3c353705d62-kube-api-access-g6q7j\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:41 crc kubenswrapper[4702]: I1125 10:49:41.983461 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"openstack-galera-0\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061307 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-default\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061377 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a3a68041-1390-4922-81b3-ca65322db681-config-data-generated\") pod 
\"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061403 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061432 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-operator-scripts\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061466 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n2sjh\" (UniqueName: \"kubernetes.io/projected/f3415a28-7b55-4649-94fd-9b976e6919d7-kube-api-access-n2sjh\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061488 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061517 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-config-data-default\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061564 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fh45j\" (UniqueName: \"kubernetes.io/projected/a3a68041-1390-4922-81b3-ca65322db681-kube-api-access-fh45j\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061594 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-kolla-config\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061626 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-kolla-config\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.061661 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-operator-scripts\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc 
kubenswrapper[4702]: I1125 10:49:42.061689 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-generated\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.062010 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a3a68041-1390-4922-81b3-ca65322db681-config-data-generated\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.062090 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-generated\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.062253 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-default\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.062805 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-config-data-default\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.062937 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-kolla-config\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.063256 4702 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") device mount path \"/mnt/openstack/pv01\"" pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.063398 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-kolla-config\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.063463 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-operator-scripts\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.063560 4702 operation_generator.go:580] "MountVolume.MountDevice succeeded for 
volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") device mount path \"/mnt/openstack/pv03\"" pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.064500 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-operator-scripts\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.071660 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.080403 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fh45j\" (UniqueName: \"kubernetes.io/projected/a3a68041-1390-4922-81b3-ca65322db681-kube-api-access-fh45j\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.081766 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.083437 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n2sjh\" (UniqueName: \"kubernetes.io/projected/f3415a28-7b55-4649-94fd-9b976e6919d7-kube-api-access-n2sjh\") pod \"openstack-galera-1\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.086821 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-2\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") " pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.090616 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.103005 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.362515 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Nov 25 10:49:42 crc kubenswrapper[4702]: W1125 10:49:42.366834 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod78dea2f0_701b_4124_8def_a3c353705d62.slice/crio-be605fbc8731375b19d5f0d92ffd0d5141c80cbfad9c6ca61dd8ce54018ef636 WatchSource:0}: Error finding container be605fbc8731375b19d5f0d92ffd0d5141c80cbfad9c6ca61dd8ce54018ef636: Status 404 returned error can't find the container with id be605fbc8731375b19d5f0d92ffd0d5141c80cbfad9c6ca61dd8ce54018ef636 Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.397516 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.506386 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"] Nov 25 10:49:42 crc kubenswrapper[4702]: W1125 10:49:42.511878 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3a68041_1390_4922_81b3_ca65322db681.slice/crio-30ff0201de893c87705f437d0bbc5fe37c402d7d7682e986d2215dac39c35787 WatchSource:0}: Error finding container 30ff0201de893c87705f437d0bbc5fe37c402d7d7682e986d2215dac39c35787: Status 404 returned error can't find the container with id 30ff0201de893c87705f437d0bbc5fe37c402d7d7682e986d2215dac39c35787 Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.602544 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" event={"ID":"a79a35ba-b1cc-4c3b-bf33-43ff1af46972","Type":"ContainerStarted","Data":"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5"} Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.603665 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"f3415a28-7b55-4649-94fd-9b976e6919d7","Type":"ContainerStarted","Data":"4444271ffdad6db0b45be0f5e5497ee89d779bced028479dcacb33fe5718327a"} Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.604437 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"78dea2f0-701b-4124-8def-a3c353705d62","Type":"ContainerStarted","Data":"be605fbc8731375b19d5f0d92ffd0d5141c80cbfad9c6ca61dd8ce54018ef636"} Nov 25 10:49:42 crc kubenswrapper[4702]: I1125 10:49:42.605202 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"a3a68041-1390-4922-81b3-ca65322db681","Type":"ContainerStarted","Data":"30ff0201de893c87705f437d0bbc5fe37c402d7d7682e986d2215dac39c35787"} Nov 25 10:49:51 crc kubenswrapper[4702]: I1125 10:49:51.662995 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"a3a68041-1390-4922-81b3-ca65322db681","Type":"ContainerStarted","Data":"557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696"} Nov 25 10:49:51 crc kubenswrapper[4702]: I1125 10:49:51.665367 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" 
event={"ID":"a79a35ba-b1cc-4c3b-bf33-43ff1af46972","Type":"ContainerStarted","Data":"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017"} Nov 25 10:49:51 crc kubenswrapper[4702]: I1125 10:49:51.666331 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:51 crc kubenswrapper[4702]: I1125 10:49:51.669040 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"f3415a28-7b55-4649-94fd-9b976e6919d7","Type":"ContainerStarted","Data":"d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a"} Nov 25 10:49:51 crc kubenswrapper[4702]: I1125 10:49:51.670975 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"78dea2f0-701b-4124-8def-a3c353705d62","Type":"ContainerStarted","Data":"9e3933617a4a19f8c19bc8c3eb6325b38ff22343c5a08d2f2ad2515a7065534e"} Nov 25 10:49:51 crc kubenswrapper[4702]: I1125 10:49:51.671501 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 10:49:51 crc kubenswrapper[4702]: I1125 10:49:51.747194 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" podStartSLOduration=1.716799722 podStartE2EDuration="12.747177204s" podCreationTimestamp="2025-11-25 10:49:39 +0000 UTC" firstStartedPulling="2025-11-25 10:49:40.200874671 +0000 UTC m=+1077.567470360" lastFinishedPulling="2025-11-25 10:49:51.231252153 +0000 UTC m=+1088.597847842" observedRunningTime="2025-11-25 10:49:51.745183049 +0000 UTC m=+1089.111778758" watchObservedRunningTime="2025-11-25 10:49:51.747177204 +0000 UTC m=+1089.113772893" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.110591 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/memcached-0"] Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.112323 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.116192 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"memcached-memcached-dockercfg-md9bb" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.120737 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/memcached-0"] Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.121669 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"memcached-config-data" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.274575 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-kolla-config\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.274806 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nbjt4\" (UniqueName: \"kubernetes.io/projected/438d54fb-aec3-476a-ae67-1d906854d271-kube-api-access-nbjt4\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.274852 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-config-data\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.376012 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-kolla-config\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.376111 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nbjt4\" (UniqueName: \"kubernetes.io/projected/438d54fb-aec3-476a-ae67-1d906854d271-kube-api-access-nbjt4\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.376135 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-config-data\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.376923 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-config-data\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.377420 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-kolla-config\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " 
pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.412446 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nbjt4\" (UniqueName: \"kubernetes.io/projected/438d54fb-aec3-476a-ae67-1d906854d271-kube-api-access-nbjt4\") pod \"memcached-0\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") " pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.430399 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:53 crc kubenswrapper[4702]: I1125 10:49:53.709682 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/memcached-0"] Nov 25 10:49:54 crc kubenswrapper[4702]: I1125 10:49:54.695771 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/memcached-0" event={"ID":"438d54fb-aec3-476a-ae67-1d906854d271","Type":"ContainerStarted","Data":"47c44fc9c00baab154ffa6e2e6fc76d926f97c42c2ddab11905a827af1ea1bbd"} Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.703974 4702 generic.go:334] "Generic (PLEG): container finished" podID="a3a68041-1390-4922-81b3-ca65322db681" containerID="557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696" exitCode=0 Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.704101 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"a3a68041-1390-4922-81b3-ca65322db681","Type":"ContainerDied","Data":"557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696"} Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.705774 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/memcached-0" event={"ID":"438d54fb-aec3-476a-ae67-1d906854d271","Type":"ContainerStarted","Data":"94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b"} Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.706220 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/memcached-0" Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.707974 4702 generic.go:334] "Generic (PLEG): container finished" podID="f3415a28-7b55-4649-94fd-9b976e6919d7" containerID="d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a" exitCode=0 Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.708052 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"f3415a28-7b55-4649-94fd-9b976e6919d7","Type":"ContainerDied","Data":"d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a"} Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.711362 4702 generic.go:334] "Generic (PLEG): container finished" podID="78dea2f0-701b-4124-8def-a3c353705d62" containerID="9e3933617a4a19f8c19bc8c3eb6325b38ff22343c5a08d2f2ad2515a7065534e" exitCode=0 Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.711410 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"78dea2f0-701b-4124-8def-a3c353705d62","Type":"ContainerDied","Data":"9e3933617a4a19f8c19bc8c3eb6325b38ff22343c5a08d2f2ad2515a7065534e"} Nov 25 10:49:55 crc kubenswrapper[4702]: I1125 10:49:55.785991 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/memcached-0" podStartSLOduration=1.134060629 podStartE2EDuration="2.785971251s" podCreationTimestamp="2025-11-25 10:49:53 +0000 UTC" 
firstStartedPulling="2025-11-25 10:49:53.717605518 +0000 UTC m=+1091.084201207" lastFinishedPulling="2025-11-25 10:49:55.36951614 +0000 UTC m=+1092.736111829" observedRunningTime="2025-11-25 10:49:55.784080969 +0000 UTC m=+1093.150676668" watchObservedRunningTime="2025-11-25 10:49:55.785971251 +0000 UTC m=+1093.152566950" Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.063125 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tvc8c"] Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.064066 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.067354 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-mpq7s" Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.079032 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tvc8c"] Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.112109 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjmn8\" (UniqueName: \"kubernetes.io/projected/3481aa1c-b03e-4f6c-af07-23e9b6633f05-kube-api-access-sjmn8\") pod \"rabbitmq-cluster-operator-index-tvc8c\" (UID: \"3481aa1c-b03e-4f6c-af07-23e9b6633f05\") " pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.213258 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjmn8\" (UniqueName: \"kubernetes.io/projected/3481aa1c-b03e-4f6c-af07-23e9b6633f05-kube-api-access-sjmn8\") pod \"rabbitmq-cluster-operator-index-tvc8c\" (UID: \"3481aa1c-b03e-4f6c-af07-23e9b6633f05\") " pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.234225 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjmn8\" (UniqueName: \"kubernetes.io/projected/3481aa1c-b03e-4f6c-af07-23e9b6633f05-kube-api-access-sjmn8\") pod \"rabbitmq-cluster-operator-index-tvc8c\" (UID: \"3481aa1c-b03e-4f6c-af07-23e9b6633f05\") " pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.379302 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.681026 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tvc8c"] Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.720466 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"a3a68041-1390-4922-81b3-ca65322db681","Type":"ContainerStarted","Data":"e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635"} Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.726389 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" event={"ID":"3481aa1c-b03e-4f6c-af07-23e9b6633f05","Type":"ContainerStarted","Data":"9c26860b5eac44744ff59c6241e5b06aa19c22bedccb6f4f32af71ef5401f68c"} Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.729652 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"f3415a28-7b55-4649-94fd-9b976e6919d7","Type":"ContainerStarted","Data":"5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970"} Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.732960 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"78dea2f0-701b-4124-8def-a3c353705d62","Type":"ContainerStarted","Data":"b12c1d8415b206ae55f9b0eef6e039d2d6e9349fe58bc1f24a1d752a43759216"} Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.774642 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/openstack-galera-0" podStartSLOduration=7.907197283 podStartE2EDuration="16.774621156s" podCreationTimestamp="2025-11-25 10:49:40 +0000 UTC" firstStartedPulling="2025-11-25 10:49:42.36930913 +0000 UTC m=+1079.735904809" lastFinishedPulling="2025-11-25 10:49:51.236732993 +0000 UTC m=+1088.603328682" observedRunningTime="2025-11-25 10:49:56.773063844 +0000 UTC m=+1094.139659553" watchObservedRunningTime="2025-11-25 10:49:56.774621156 +0000 UTC m=+1094.141216845" Nov 25 10:49:56 crc kubenswrapper[4702]: I1125 10:49:56.777567 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/openstack-galera-2" podStartSLOduration=8.061591021 podStartE2EDuration="16.777560247s" podCreationTimestamp="2025-11-25 10:49:40 +0000 UTC" firstStartedPulling="2025-11-25 10:49:42.514885256 +0000 UTC m=+1079.881480945" lastFinishedPulling="2025-11-25 10:49:51.230854482 +0000 UTC m=+1088.597450171" observedRunningTime="2025-11-25 10:49:56.748138649 +0000 UTC m=+1094.114734348" watchObservedRunningTime="2025-11-25 10:49:56.777560247 +0000 UTC m=+1094.144155936" Nov 25 10:50:00 crc kubenswrapper[4702]: I1125 10:50:00.256701 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/openstack-galera-1" podStartSLOduration=11.475893566 podStartE2EDuration="20.256677941s" podCreationTimestamp="2025-11-25 10:49:40 +0000 UTC" firstStartedPulling="2025-11-25 10:49:42.404398733 +0000 UTC m=+1079.770994422" lastFinishedPulling="2025-11-25 10:49:51.185183108 +0000 UTC m=+1088.551778797" observedRunningTime="2025-11-25 10:49:56.798511302 +0000 UTC m=+1094.165106991" watchObservedRunningTime="2025-11-25 10:50:00.256677941 +0000 UTC m=+1097.623273640" Nov 25 10:50:00 crc kubenswrapper[4702]: I1125 10:50:00.258779 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["openstack-operators/rabbitmq-cluster-operator-index-tvc8c"] Nov 25 10:50:00 crc kubenswrapper[4702]: I1125 10:50:00.863492 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-qpv8l"] Nov 25 10:50:00 crc kubenswrapper[4702]: I1125 10:50:00.864368 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:00 crc kubenswrapper[4702]: I1125 10:50:00.881097 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-qpv8l"] Nov 25 10:50:00 crc kubenswrapper[4702]: I1125 10:50:00.981476 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6f9wg\" (UniqueName: \"kubernetes.io/projected/30210a41-8b19-4210-950f-5035af734552-kube-api-access-6f9wg\") pod \"rabbitmq-cluster-operator-index-qpv8l\" (UID: \"30210a41-8b19-4210-950f-5035af734552\") " pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:01 crc kubenswrapper[4702]: I1125 10:50:01.082499 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6f9wg\" (UniqueName: \"kubernetes.io/projected/30210a41-8b19-4210-950f-5035af734552-kube-api-access-6f9wg\") pod \"rabbitmq-cluster-operator-index-qpv8l\" (UID: \"30210a41-8b19-4210-950f-5035af734552\") " pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:01 crc kubenswrapper[4702]: I1125 10:50:01.106048 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6f9wg\" (UniqueName: \"kubernetes.io/projected/30210a41-8b19-4210-950f-5035af734552-kube-api-access-6f9wg\") pod \"rabbitmq-cluster-operator-index-qpv8l\" (UID: \"30210a41-8b19-4210-950f-5035af734552\") " pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:01 crc kubenswrapper[4702]: I1125 10:50:01.188582 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:01 crc kubenswrapper[4702]: I1125 10:50:01.719421 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-qpv8l"] Nov 25 10:50:01 crc kubenswrapper[4702]: I1125 10:50:01.767420 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" event={"ID":"3481aa1c-b03e-4f6c-af07-23e9b6633f05","Type":"ContainerStarted","Data":"9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce"} Nov 25 10:50:01 crc kubenswrapper[4702]: I1125 10:50:01.767523 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" podUID="3481aa1c-b03e-4f6c-af07-23e9b6633f05" containerName="registry-server" containerID="cri-o://9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce" gracePeriod=2 Nov 25 10:50:01 crc kubenswrapper[4702]: I1125 10:50:01.769150 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" event={"ID":"30210a41-8b19-4210-950f-5035af734552","Type":"ContainerStarted","Data":"52bc1be2ebb6f61ff376ad3b2f72abb7fb78566efa9f11f999f8a55f05ebc741"} Nov 25 10:50:01 crc kubenswrapper[4702]: I1125 10:50:01.789873 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" podStartSLOduration=1.0328864150000001 podStartE2EDuration="5.789855824s" podCreationTimestamp="2025-11-25 10:49:56 +0000 UTC" firstStartedPulling="2025-11-25 10:49:56.707143874 +0000 UTC m=+1094.073739563" lastFinishedPulling="2025-11-25 10:50:01.464113283 +0000 UTC m=+1098.830708972" observedRunningTime="2025-11-25 10:50:01.786685077 +0000 UTC m=+1099.153280776" watchObservedRunningTime="2025-11-25 10:50:01.789855824 +0000 UTC m=+1099.156451513" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.072060 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.072130 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.091354 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.091414 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.100536 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.103577 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.103681 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.194674 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjmn8\" (UniqueName: \"kubernetes.io/projected/3481aa1c-b03e-4f6c-af07-23e9b6633f05-kube-api-access-sjmn8\") pod \"3481aa1c-b03e-4f6c-af07-23e9b6633f05\" (UID: \"3481aa1c-b03e-4f6c-af07-23e9b6633f05\") " Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.201160 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3481aa1c-b03e-4f6c-af07-23e9b6633f05-kube-api-access-sjmn8" (OuterVolumeSpecName: "kube-api-access-sjmn8") pod "3481aa1c-b03e-4f6c-af07-23e9b6633f05" (UID: "3481aa1c-b03e-4f6c-af07-23e9b6633f05"). InnerVolumeSpecName "kube-api-access-sjmn8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.296131 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjmn8\" (UniqueName: \"kubernetes.io/projected/3481aa1c-b03e-4f6c-af07-23e9b6633f05-kube-api-access-sjmn8\") on node \"crc\" DevicePath \"\"" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.776821 4702 generic.go:334] "Generic (PLEG): container finished" podID="3481aa1c-b03e-4f6c-af07-23e9b6633f05" containerID="9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce" exitCode=0 Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.776884 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.776885 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" event={"ID":"3481aa1c-b03e-4f6c-af07-23e9b6633f05","Type":"ContainerDied","Data":"9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce"} Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.777351 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-tvc8c" event={"ID":"3481aa1c-b03e-4f6c-af07-23e9b6633f05","Type":"ContainerDied","Data":"9c26860b5eac44744ff59c6241e5b06aa19c22bedccb6f4f32af71ef5401f68c"} Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.777391 4702 scope.go:117] "RemoveContainer" containerID="9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.778998 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" event={"ID":"30210a41-8b19-4210-950f-5035af734552","Type":"ContainerStarted","Data":"67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1"} Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.797852 4702 scope.go:117] "RemoveContainer" containerID="9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce" Nov 25 10:50:08 crc kubenswrapper[4702]: E1125 10:50:02.798688 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce\": container with ID starting with 9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce not found: ID does not exist" containerID="9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.798733 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce"} err="failed to get container status \"9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce\": rpc error: code = NotFound desc = could not find container \"9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce\": container with ID starting with 9dc5a14fbad44994615ba96fc67af100873b4c77d7a2e2a2d6016794188d49ce not found: ID does not exist" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.808790 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tvc8c"] Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:02.813653 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-tvc8c"] Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:03.410966 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3481aa1c-b03e-4f6c-af07-23e9b6633f05" path="/var/lib/kubelet/pods/3481aa1c-b03e-4f6c-af07-23e9b6633f05/volumes" Nov 25 10:50:08 crc kubenswrapper[4702]: I1125 10:50:03.432860 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/memcached-0" Nov 25 10:50:10 crc kubenswrapper[4702]: I1125 10:50:10.822250 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:50:10 crc kubenswrapper[4702]: I1125 10:50:10.845488 4702 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" podStartSLOduration=10.09930845 podStartE2EDuration="10.845470031s" podCreationTimestamp="2025-11-25 10:50:00 +0000 UTC" firstStartedPulling="2025-11-25 10:50:01.728245503 +0000 UTC m=+1099.094841182" lastFinishedPulling="2025-11-25 10:50:02.474407074 +0000 UTC m=+1099.841002763" observedRunningTime="2025-11-25 10:50:03.801242411 +0000 UTC m=+1101.167838100" watchObservedRunningTime="2025-11-25 10:50:10.845470031 +0000 UTC m=+1108.212065720" Nov 25 10:50:10 crc kubenswrapper[4702]: I1125 10:50:10.900022 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/openstack-galera-2" Nov 25 10:50:11 crc kubenswrapper[4702]: I1125 10:50:11.189106 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:11 crc kubenswrapper[4702]: I1125 10:50:11.189149 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:11 crc kubenswrapper[4702]: I1125 10:50:11.213512 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:11 crc kubenswrapper[4702]: I1125 10:50:11.866069 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.111857 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p"] Nov 25 10:50:14 crc kubenswrapper[4702]: E1125 10:50:14.114473 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3481aa1c-b03e-4f6c-af07-23e9b6633f05" containerName="registry-server" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.114741 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="3481aa1c-b03e-4f6c-af07-23e9b6633f05" containerName="registry-server" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.115688 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="3481aa1c-b03e-4f6c-af07-23e9b6633f05" containerName="registry-server" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.134254 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p"] Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.135083 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.139424 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-wkv7r" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.262756 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.262926 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.263047 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gz2rc\" (UniqueName: \"kubernetes.io/projected/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-kube-api-access-gz2rc\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.364833 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.364916 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.364961 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gz2rc\" (UniqueName: \"kubernetes.io/projected/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-kube-api-access-gz2rc\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.365466 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " 
pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.365554 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.386005 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gz2rc\" (UniqueName: \"kubernetes.io/projected/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-kube-api-access-gz2rc\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.459876 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:14 crc kubenswrapper[4702]: I1125 10:50:14.911232 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p"] Nov 25 10:50:15 crc kubenswrapper[4702]: I1125 10:50:15.865324 4702 generic.go:334] "Generic (PLEG): container finished" podID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerID="161c5c26cba12480bc73ce12a341f3c734f9ce20a4a19f289b6215e135a39fbb" exitCode=0 Nov 25 10:50:15 crc kubenswrapper[4702]: I1125 10:50:15.865684 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" event={"ID":"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6","Type":"ContainerDied","Data":"161c5c26cba12480bc73ce12a341f3c734f9ce20a4a19f289b6215e135a39fbb"} Nov 25 10:50:15 crc kubenswrapper[4702]: I1125 10:50:15.865716 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" event={"ID":"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6","Type":"ContainerStarted","Data":"28e2de23dcb98a56415caa82dc2569f2edb5b63f9281bdbb9074fc8b240af758"} Nov 25 10:50:15 crc kubenswrapper[4702]: I1125 10:50:15.867321 4702 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 10:50:16 crc kubenswrapper[4702]: I1125 10:50:16.875610 4702 generic.go:334] "Generic (PLEG): container finished" podID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerID="73d5e34f3c755fe0cae9dbe43a00fd1a0393ff2c472bbc236f378ba6173b1d7a" exitCode=0 Nov 25 10:50:16 crc kubenswrapper[4702]: I1125 10:50:16.875698 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" event={"ID":"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6","Type":"ContainerDied","Data":"73d5e34f3c755fe0cae9dbe43a00fd1a0393ff2c472bbc236f378ba6173b1d7a"} Nov 25 10:50:17 crc kubenswrapper[4702]: I1125 10:50:17.883957 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" event={"ID":"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6","Type":"ContainerStarted","Data":"dbeaddab60fbe6ea747939d3bac827c3af7a2019ecc90edf27e6752bd86558bb"} 
Nov 25 10:50:18 crc kubenswrapper[4702]: I1125 10:50:18.893880 4702 generic.go:334] "Generic (PLEG): container finished" podID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerID="dbeaddab60fbe6ea747939d3bac827c3af7a2019ecc90edf27e6752bd86558bb" exitCode=0 Nov 25 10:50:18 crc kubenswrapper[4702]: I1125 10:50:18.893944 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" event={"ID":"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6","Type":"ContainerDied","Data":"dbeaddab60fbe6ea747939d3bac827c3af7a2019ecc90edf27e6752bd86558bb"} Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.168119 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.240365 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gz2rc\" (UniqueName: \"kubernetes.io/projected/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-kube-api-access-gz2rc\") pod \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.240751 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-util\") pod \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.240790 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-bundle\") pod \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\" (UID: \"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6\") " Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.241705 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-bundle" (OuterVolumeSpecName: "bundle") pod "4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" (UID: "4ee32898-9ad6-4bca-9bd8-b12d7bb291a6"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.242081 4702 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.250232 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-kube-api-access-gz2rc" (OuterVolumeSpecName: "kube-api-access-gz2rc") pod "4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" (UID: "4ee32898-9ad6-4bca-9bd8-b12d7bb291a6"). InnerVolumeSpecName "kube-api-access-gz2rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.250929 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-util" (OuterVolumeSpecName: "util") pod "4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" (UID: "4ee32898-9ad6-4bca-9bd8-b12d7bb291a6"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.343864 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gz2rc\" (UniqueName: \"kubernetes.io/projected/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-kube-api-access-gz2rc\") on node \"crc\" DevicePath \"\"" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.343925 4702 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6-util\") on node \"crc\" DevicePath \"\"" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.589520 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.661070 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.909645 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" event={"ID":"4ee32898-9ad6-4bca-9bd8-b12d7bb291a6","Type":"ContainerDied","Data":"28e2de23dcb98a56415caa82dc2569f2edb5b63f9281bdbb9074fc8b240af758"} Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.909706 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="28e2de23dcb98a56415caa82dc2569f2edb5b63f9281bdbb9074fc8b240af758" Nov 25 10:50:20 crc kubenswrapper[4702]: I1125 10:50:20.909727 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p" Nov 25 10:50:24 crc kubenswrapper[4702]: I1125 10:50:24.680105 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:50:24 crc kubenswrapper[4702]: I1125 10:50:24.760023 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.167691 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9"] Nov 25 10:50:29 crc kubenswrapper[4702]: E1125 10:50:29.168370 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerName="util" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.168383 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerName="util" Nov 25 10:50:29 crc kubenswrapper[4702]: E1125 10:50:29.168401 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerName="pull" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.168406 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerName="pull" Nov 25 10:50:29 crc kubenswrapper[4702]: E1125 10:50:29.168414 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerName="extract" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.168420 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerName="extract" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.168522 4702 memory_manager.go:354] "RemoveStaleState 
removing state" podUID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" containerName="extract" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.168924 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.171403 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-7gmk8" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.175666 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lppj9\" (UniqueName: \"kubernetes.io/projected/1599a060-4e8e-4a9f-a5af-ddb18e7c1e17-kube-api-access-lppj9\") pod \"rabbitmq-cluster-operator-779fc9694b-hzqw9\" (UID: \"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.178955 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9"] Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.276930 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lppj9\" (UniqueName: \"kubernetes.io/projected/1599a060-4e8e-4a9f-a5af-ddb18e7c1e17-kube-api-access-lppj9\") pod \"rabbitmq-cluster-operator-779fc9694b-hzqw9\" (UID: \"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.296797 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lppj9\" (UniqueName: \"kubernetes.io/projected/1599a060-4e8e-4a9f-a5af-ddb18e7c1e17-kube-api-access-lppj9\") pod \"rabbitmq-cluster-operator-779fc9694b-hzqw9\" (UID: \"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.490233 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.908265 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9"] Nov 25 10:50:29 crc kubenswrapper[4702]: I1125 10:50:29.957148 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" event={"ID":"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17","Type":"ContainerStarted","Data":"821d7818ef3681962bb2109e2283b4ae1425cc55c5d7734784f2bcccf1497b01"} Nov 25 10:50:36 crc kubenswrapper[4702]: I1125 10:50:36.007996 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" event={"ID":"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17","Type":"ContainerStarted","Data":"96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b"} Nov 25 10:50:36 crc kubenswrapper[4702]: I1125 10:50:36.031640 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" podStartSLOduration=1.507449327 podStartE2EDuration="7.031615856s" podCreationTimestamp="2025-11-25 10:50:29 +0000 UTC" firstStartedPulling="2025-11-25 10:50:29.918317859 +0000 UTC m=+1127.284913548" lastFinishedPulling="2025-11-25 10:50:35.442484398 +0000 UTC m=+1132.809080077" observedRunningTime="2025-11-25 10:50:36.02444198 +0000 UTC m=+1133.391037689" watchObservedRunningTime="2025-11-25 10:50:36.031615856 +0000 UTC m=+1133.398211555" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.735380 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.736928 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.740348 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"rabbitmq-erlang-cookie" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.740373 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"rabbitmq-server-dockercfg-p8gf8" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.740679 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"rabbitmq-default-user" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.740689 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"rabbitmq-plugins-conf" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.742032 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"rabbitmq-server-conf" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.743775 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.930287 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nrqc6\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-kube-api-access-nrqc6\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.930342 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.930368 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.930389 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.930416 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-06ad8973-3126-4d03-bd80-02618c78d955\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.930446 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " 
pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.930467 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:39 crc kubenswrapper[4702]: I1125 10:50:39.930493 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.032602 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.033061 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.033135 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.033427 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.033501 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.034381 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nrqc6\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-kube-api-access-nrqc6\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.034729 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc 
kubenswrapper[4702]: I1125 10:50:40.034771 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.034792 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.035457 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.034810 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-06ad8973-3126-4d03-bd80-02618c78d955\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.037948 4702 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.037974 4702 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-06ad8973-3126-4d03-bd80-02618c78d955\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/0f0836334fdd63a526fd95970e9e232e5d6fd78dd7393e9e92bbc4bc3fe7c629/globalmount\"" pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.039334 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.042937 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.052879 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-pod-info\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.057735 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for 
volume \"kube-api-access-nrqc6\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-kube-api-access-nrqc6\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.059339 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-06ad8973-3126-4d03-bd80-02618c78d955\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955\") pod \"rabbitmq-server-0\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") " pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.353438 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:50:40 crc kubenswrapper[4702]: I1125 10:50:40.783565 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"] Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.038380 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8","Type":"ContainerStarted","Data":"21710379d2aed65f6759d3587d04d60a691b86dfda1bdb9ead5d656e63d2629f"} Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.465043 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-j9plj"] Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.465786 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.469728 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-8zmqt" Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.473344 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-j9plj"] Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.658484 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mqb74\" (UniqueName: \"kubernetes.io/projected/7c562a4b-ceef-41a2-aa46-a2962017eb2b-kube-api-access-mqb74\") pod \"keystone-operator-index-j9plj\" (UID: \"7c562a4b-ceef-41a2-aa46-a2962017eb2b\") " pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.759480 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mqb74\" (UniqueName: \"kubernetes.io/projected/7c562a4b-ceef-41a2-aa46-a2962017eb2b-kube-api-access-mqb74\") pod \"keystone-operator-index-j9plj\" (UID: \"7c562a4b-ceef-41a2-aa46-a2962017eb2b\") " pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.777770 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mqb74\" (UniqueName: \"kubernetes.io/projected/7c562a4b-ceef-41a2-aa46-a2962017eb2b-kube-api-access-mqb74\") pod \"keystone-operator-index-j9plj\" (UID: \"7c562a4b-ceef-41a2-aa46-a2962017eb2b\") " pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.787475 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:50:41 crc kubenswrapper[4702]: I1125 10:50:41.972682 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-j9plj"] Nov 25 10:50:41 crc kubenswrapper[4702]: W1125 10:50:41.984959 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7c562a4b_ceef_41a2_aa46_a2962017eb2b.slice/crio-a8a730ab7b7b52f2383c1ea37cbe6b7e3be398ae62463db4fd106871d13cca8e WatchSource:0}: Error finding container a8a730ab7b7b52f2383c1ea37cbe6b7e3be398ae62463db4fd106871d13cca8e: Status 404 returned error can't find the container with id a8a730ab7b7b52f2383c1ea37cbe6b7e3be398ae62463db4fd106871d13cca8e Nov 25 10:50:42 crc kubenswrapper[4702]: I1125 10:50:42.046516 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-j9plj" event={"ID":"7c562a4b-ceef-41a2-aa46-a2962017eb2b","Type":"ContainerStarted","Data":"a8a730ab7b7b52f2383c1ea37cbe6b7e3be398ae62463db4fd106871d13cca8e"} Nov 25 10:50:57 crc kubenswrapper[4702]: I1125 10:50:57.145239 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-j9plj" event={"ID":"7c562a4b-ceef-41a2-aa46-a2962017eb2b","Type":"ContainerStarted","Data":"d962d7e00791a4f71fd52183116aa7ac77c107948516c26bfda67efb406f5f0a"} Nov 25 10:50:57 crc kubenswrapper[4702]: I1125 10:50:57.161811 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-index-j9plj" podStartSLOduration=1.367347452 podStartE2EDuration="16.161792787s" podCreationTimestamp="2025-11-25 10:50:41 +0000 UTC" firstStartedPulling="2025-11-25 10:50:41.986744285 +0000 UTC m=+1139.353339974" lastFinishedPulling="2025-11-25 10:50:56.78118962 +0000 UTC m=+1154.147785309" observedRunningTime="2025-11-25 10:50:57.159224693 +0000 UTC m=+1154.525820382" watchObservedRunningTime="2025-11-25 10:50:57.161792787 +0000 UTC m=+1154.528388476" Nov 25 10:50:58 crc kubenswrapper[4702]: I1125 10:50:58.153227 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8","Type":"ContainerStarted","Data":"b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80"} Nov 25 10:51:01 crc kubenswrapper[4702]: I1125 10:51:01.788965 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:51:01 crc kubenswrapper[4702]: I1125 10:51:01.789324 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:51:01 crc kubenswrapper[4702]: I1125 10:51:01.822609 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:51:02 crc kubenswrapper[4702]: I1125 10:51:02.203856 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-j9plj" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.114979 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq"] Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.117118 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.119372 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-wkv7r" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.127942 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq"] Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.266769 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6b2mg\" (UniqueName: \"kubernetes.io/projected/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-kube-api-access-6b2mg\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.266843 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-bundle\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.266869 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-util\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.368010 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6b2mg\" (UniqueName: \"kubernetes.io/projected/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-kube-api-access-6b2mg\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.368108 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-bundle\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.368138 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-util\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.368613 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-util\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.368657 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-bundle\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.392358 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6b2mg\" (UniqueName: \"kubernetes.io/projected/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-kube-api-access-6b2mg\") pod \"7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.435570 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:10 crc kubenswrapper[4702]: I1125 10:51:10.865672 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq"] Nov 25 10:51:11 crc kubenswrapper[4702]: I1125 10:51:11.235754 4702 generic.go:334] "Generic (PLEG): container finished" podID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerID="e3e6b965d770280718b1f141caec6f653cbaf915b00ffa7fa3ac958db9c6af55" exitCode=0 Nov 25 10:51:11 crc kubenswrapper[4702]: I1125 10:51:11.235800 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" event={"ID":"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf","Type":"ContainerDied","Data":"e3e6b965d770280718b1f141caec6f653cbaf915b00ffa7fa3ac958db9c6af55"} Nov 25 10:51:11 crc kubenswrapper[4702]: I1125 10:51:11.235848 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" event={"ID":"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf","Type":"ContainerStarted","Data":"9917d8f38884a798826f80b45a59f66d54861c3b104b6c11d05b97cb7c251b38"} Nov 25 10:51:12 crc kubenswrapper[4702]: I1125 10:51:12.258139 4702 generic.go:334] "Generic (PLEG): container finished" podID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerID="0b0a58def35a854f026ccf555c0f0542874c3073569f6a6ac85a710fd19c8db6" exitCode=0 Nov 25 10:51:12 crc kubenswrapper[4702]: I1125 10:51:12.258238 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" event={"ID":"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf","Type":"ContainerDied","Data":"0b0a58def35a854f026ccf555c0f0542874c3073569f6a6ac85a710fd19c8db6"} Nov 25 10:51:13 crc kubenswrapper[4702]: I1125 10:51:13.267853 4702 generic.go:334] "Generic (PLEG): container finished" podID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerID="1bb300c1d91a75fe78b93d9cbf105112d102e1e923a6f8d8190b3d5cd1ee66fa" exitCode=0 Nov 25 10:51:13 crc kubenswrapper[4702]: I1125 10:51:13.267919 4702 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" event={"ID":"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf","Type":"ContainerDied","Data":"1bb300c1d91a75fe78b93d9cbf105112d102e1e923a6f8d8190b3d5cd1ee66fa"} Nov 25 10:51:13 crc kubenswrapper[4702]: I1125 10:51:13.590923 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:51:13 crc kubenswrapper[4702]: I1125 10:51:13.590990 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.543394 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.728553 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6b2mg\" (UniqueName: \"kubernetes.io/projected/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-kube-api-access-6b2mg\") pod \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.729048 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-util\") pod \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.729136 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-bundle\") pod \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\" (UID: \"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf\") " Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.730127 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-bundle" (OuterVolumeSpecName: "bundle") pod "9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" (UID: "9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.737388 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-kube-api-access-6b2mg" (OuterVolumeSpecName: "kube-api-access-6b2mg") pod "9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" (UID: "9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf"). InnerVolumeSpecName "kube-api-access-6b2mg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.745703 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-util" (OuterVolumeSpecName: "util") pod "9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" (UID: "9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.831087 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6b2mg\" (UniqueName: \"kubernetes.io/projected/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-kube-api-access-6b2mg\") on node \"crc\" DevicePath \"\"" Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.831122 4702 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-util\") on node \"crc\" DevicePath \"\"" Nov 25 10:51:14 crc kubenswrapper[4702]: I1125 10:51:14.831134 4702 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:51:15 crc kubenswrapper[4702]: I1125 10:51:15.283114 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" event={"ID":"9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf","Type":"ContainerDied","Data":"9917d8f38884a798826f80b45a59f66d54861c3b104b6c11d05b97cb7c251b38"} Nov 25 10:51:15 crc kubenswrapper[4702]: I1125 10:51:15.283154 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9917d8f38884a798826f80b45a59f66d54861c3b104b6c11d05b97cb7c251b38" Nov 25 10:51:15 crc kubenswrapper[4702]: I1125 10:51:15.283154 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.447839 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw"] Nov 25 10:51:21 crc kubenswrapper[4702]: E1125 10:51:21.450837 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerName="pull" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.450859 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerName="pull" Nov 25 10:51:21 crc kubenswrapper[4702]: E1125 10:51:21.450918 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerName="util" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.450928 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerName="util" Nov 25 10:51:21 crc kubenswrapper[4702]: E1125 10:51:21.450950 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerName="extract" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.450956 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerName="extract" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.451191 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" containerName="extract" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.452335 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.456526 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-k28rf" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.456844 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.475875 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw"] Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.583859 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-apiservice-cert\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.583914 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-webhook-cert\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.583954 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sp9bp\" (UniqueName: \"kubernetes.io/projected/f106a32a-e625-4380-9d1c-683bbf9036bc-kube-api-access-sp9bp\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.684775 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-apiservice-cert\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.685018 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-webhook-cert\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.685119 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sp9bp\" (UniqueName: \"kubernetes.io/projected/f106a32a-e625-4380-9d1c-683bbf9036bc-kube-api-access-sp9bp\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.690990 4702 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-apiservice-cert\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.705213 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-webhook-cert\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.710615 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sp9bp\" (UniqueName: \"kubernetes.io/projected/f106a32a-e625-4380-9d1c-683bbf9036bc-kube-api-access-sp9bp\") pod \"keystone-operator-controller-manager-6d87d877ff-4vvkw\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") " pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:21 crc kubenswrapper[4702]: I1125 10:51:21.790800 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:22 crc kubenswrapper[4702]: I1125 10:51:22.225206 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw"] Nov 25 10:51:22 crc kubenswrapper[4702]: I1125 10:51:22.876506 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" event={"ID":"f106a32a-e625-4380-9d1c-683bbf9036bc","Type":"ContainerStarted","Data":"9b2758f30ccb273b264c686972218dcbea73a3ece2da88f33317f0f509743602"} Nov 25 10:51:25 crc kubenswrapper[4702]: I1125 10:51:25.895763 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" event={"ID":"f106a32a-e625-4380-9d1c-683bbf9036bc","Type":"ContainerStarted","Data":"5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca"} Nov 25 10:51:25 crc kubenswrapper[4702]: I1125 10:51:25.896321 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:25 crc kubenswrapper[4702]: I1125 10:51:25.920830 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" podStartSLOduration=2.115064382 podStartE2EDuration="4.92080186s" podCreationTimestamp="2025-11-25 10:51:21 +0000 UTC" firstStartedPulling="2025-11-25 10:51:22.231310381 +0000 UTC m=+1179.597906070" lastFinishedPulling="2025-11-25 10:51:25.037047859 +0000 UTC m=+1182.403643548" observedRunningTime="2025-11-25 10:51:25.910388119 +0000 UTC m=+1183.276983818" watchObservedRunningTime="2025-11-25 10:51:25.92080186 +0000 UTC m=+1183.287397549" Nov 25 10:51:29 crc kubenswrapper[4702]: I1125 10:51:29.919051 4702 generic.go:334] "Generic (PLEG): container finished" podID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" containerID="b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80" exitCode=0 Nov 25 10:51:29 crc kubenswrapper[4702]: I1125 
10:51:29.919116 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8","Type":"ContainerDied","Data":"b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80"} Nov 25 10:51:30 crc kubenswrapper[4702]: I1125 10:51:30.928708 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8","Type":"ContainerStarted","Data":"393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8"} Nov 25 10:51:30 crc kubenswrapper[4702]: I1125 10:51:30.929991 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:51:30 crc kubenswrapper[4702]: I1125 10:51:30.954973 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.890582464 podStartE2EDuration="52.954954188s" podCreationTimestamp="2025-11-25 10:50:38 +0000 UTC" firstStartedPulling="2025-11-25 10:50:40.79346799 +0000 UTC m=+1138.160063679" lastFinishedPulling="2025-11-25 10:50:56.857839704 +0000 UTC m=+1154.224435403" observedRunningTime="2025-11-25 10:51:30.950807918 +0000 UTC m=+1188.317403637" watchObservedRunningTime="2025-11-25 10:51:30.954954188 +0000 UTC m=+1188.321549877" Nov 25 10:51:31 crc kubenswrapper[4702]: I1125 10:51:31.795928 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.891159 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-p7dzq"] Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.892612 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.897164 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s"] Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.897992 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.899543 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.903927 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-p7dzq"] Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.912828 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s"] Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.975680 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f573dd2-7fba-4c83-88a5-af809f6a1945-operator-scripts\") pod \"keystone-db-create-p7dzq\" (UID: \"2f573dd2-7fba-4c83-88a5-af809f6a1945\") " pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.975738 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h86lv\" (UniqueName: \"kubernetes.io/projected/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-kube-api-access-h86lv\") pod \"keystone-7756-account-create-update-jdv6s\" (UID: \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\") " pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.975778 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-operator-scripts\") pod \"keystone-7756-account-create-update-jdv6s\" (UID: \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\") " pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:33 crc kubenswrapper[4702]: I1125 10:51:33.975959 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4h4s7\" (UniqueName: \"kubernetes.io/projected/2f573dd2-7fba-4c83-88a5-af809f6a1945-kube-api-access-4h4s7\") pod \"keystone-db-create-p7dzq\" (UID: \"2f573dd2-7fba-4c83-88a5-af809f6a1945\") " pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.077335 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4h4s7\" (UniqueName: \"kubernetes.io/projected/2f573dd2-7fba-4c83-88a5-af809f6a1945-kube-api-access-4h4s7\") pod \"keystone-db-create-p7dzq\" (UID: \"2f573dd2-7fba-4c83-88a5-af809f6a1945\") " pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.077652 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f573dd2-7fba-4c83-88a5-af809f6a1945-operator-scripts\") pod \"keystone-db-create-p7dzq\" (UID: \"2f573dd2-7fba-4c83-88a5-af809f6a1945\") " pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.077752 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h86lv\" (UniqueName: \"kubernetes.io/projected/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-kube-api-access-h86lv\") pod \"keystone-7756-account-create-update-jdv6s\" (UID: \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\") " 
pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.077864 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-operator-scripts\") pod \"keystone-7756-account-create-update-jdv6s\" (UID: \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\") " pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.078548 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-operator-scripts\") pod \"keystone-7756-account-create-update-jdv6s\" (UID: \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\") " pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.078548 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f573dd2-7fba-4c83-88a5-af809f6a1945-operator-scripts\") pod \"keystone-db-create-p7dzq\" (UID: \"2f573dd2-7fba-4c83-88a5-af809f6a1945\") " pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.095125 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4h4s7\" (UniqueName: \"kubernetes.io/projected/2f573dd2-7fba-4c83-88a5-af809f6a1945-kube-api-access-4h4s7\") pod \"keystone-db-create-p7dzq\" (UID: \"2f573dd2-7fba-4c83-88a5-af809f6a1945\") " pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.095296 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h86lv\" (UniqueName: \"kubernetes.io/projected/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-kube-api-access-h86lv\") pod \"keystone-7756-account-create-update-jdv6s\" (UID: \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\") " pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.214049 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.228024 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.702988 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s"] Nov 25 10:51:34 crc kubenswrapper[4702]: W1125 10:51:34.712972 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod359938d3_a87f_48ab_98dc_f2c9ca99f6ee.slice/crio-3590a68408313cec5ad647875002828354f3c089f82bfa301d1fc6cfa380bed2 WatchSource:0}: Error finding container 3590a68408313cec5ad647875002828354f3c089f82bfa301d1fc6cfa380bed2: Status 404 returned error can't find the container with id 3590a68408313cec5ad647875002828354f3c089f82bfa301d1fc6cfa380bed2 Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.730152 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-p7dzq"] Nov 25 10:51:34 crc kubenswrapper[4702]: W1125 10:51:34.737087 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod2f573dd2_7fba_4c83_88a5_af809f6a1945.slice/crio-1ba78f3df1862bae1501716af15bd4e0000e0e65a66579a475b6b1d81f0e3355 WatchSource:0}: Error finding container 1ba78f3df1862bae1501716af15bd4e0000e0e65a66579a475b6b1d81f0e3355: Status 404 returned error can't find the container with id 1ba78f3df1862bae1501716af15bd4e0000e0e65a66579a475b6b1d81f0e3355 Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.957848 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" event={"ID":"2f573dd2-7fba-4c83-88a5-af809f6a1945","Type":"ContainerStarted","Data":"1ba78f3df1862bae1501716af15bd4e0000e0e65a66579a475b6b1d81f0e3355"} Nov 25 10:51:34 crc kubenswrapper[4702]: I1125 10:51:34.959115 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" event={"ID":"359938d3-a87f-48ab-98dc-f2c9ca99f6ee","Type":"ContainerStarted","Data":"3590a68408313cec5ad647875002828354f3c089f82bfa301d1fc6cfa380bed2"} Nov 25 10:51:35 crc kubenswrapper[4702]: I1125 10:51:35.966009 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" event={"ID":"2f573dd2-7fba-4c83-88a5-af809f6a1945","Type":"ContainerStarted","Data":"5ff68c154a540a829475eb3c4bf0ad2f334b79bf3d216f2cf9d1e35c4d2518d7"} Nov 25 10:51:35 crc kubenswrapper[4702]: I1125 10:51:35.967061 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" event={"ID":"359938d3-a87f-48ab-98dc-f2c9ca99f6ee","Type":"ContainerStarted","Data":"f67620f39482a658358a642b5d376a90c33a2d0b31316efbcff085ad3bfa5aab"} Nov 25 10:51:35 crc kubenswrapper[4702]: I1125 10:51:35.984070 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" podStartSLOduration=2.98405026 podStartE2EDuration="2.98405026s" podCreationTimestamp="2025-11-25 10:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:51:35.9802709 +0000 UTC m=+1193.346866599" watchObservedRunningTime="2025-11-25 10:51:35.98405026 +0000 UTC m=+1193.350645949" Nov 25 10:51:35 crc kubenswrapper[4702]: I1125 10:51:35.994619 4702 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" podStartSLOduration=2.994591485 podStartE2EDuration="2.994591485s" podCreationTimestamp="2025-11-25 10:51:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:51:35.992054561 +0000 UTC m=+1193.358650250" watchObservedRunningTime="2025-11-25 10:51:35.994591485 +0000 UTC m=+1193.361187164" Nov 25 10:51:36 crc kubenswrapper[4702]: I1125 10:51:36.975708 4702 generic.go:334] "Generic (PLEG): container finished" podID="2f573dd2-7fba-4c83-88a5-af809f6a1945" containerID="5ff68c154a540a829475eb3c4bf0ad2f334b79bf3d216f2cf9d1e35c4d2518d7" exitCode=0 Nov 25 10:51:36 crc kubenswrapper[4702]: I1125 10:51:36.975795 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" event={"ID":"2f573dd2-7fba-4c83-88a5-af809f6a1945","Type":"ContainerDied","Data":"5ff68c154a540a829475eb3c4bf0ad2f334b79bf3d216f2cf9d1e35c4d2518d7"} Nov 25 10:51:37 crc kubenswrapper[4702]: I1125 10:51:37.983958 4702 generic.go:334] "Generic (PLEG): container finished" podID="359938d3-a87f-48ab-98dc-f2c9ca99f6ee" containerID="f67620f39482a658358a642b5d376a90c33a2d0b31316efbcff085ad3bfa5aab" exitCode=0 Nov 25 10:51:37 crc kubenswrapper[4702]: I1125 10:51:37.984054 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" event={"ID":"359938d3-a87f-48ab-98dc-f2c9ca99f6ee","Type":"ContainerDied","Data":"f67620f39482a658358a642b5d376a90c33a2d0b31316efbcff085ad3bfa5aab"} Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.319047 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.438566 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f573dd2-7fba-4c83-88a5-af809f6a1945-operator-scripts\") pod \"2f573dd2-7fba-4c83-88a5-af809f6a1945\" (UID: \"2f573dd2-7fba-4c83-88a5-af809f6a1945\") " Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.438627 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4h4s7\" (UniqueName: \"kubernetes.io/projected/2f573dd2-7fba-4c83-88a5-af809f6a1945-kube-api-access-4h4s7\") pod \"2f573dd2-7fba-4c83-88a5-af809f6a1945\" (UID: \"2f573dd2-7fba-4c83-88a5-af809f6a1945\") " Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.439033 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2f573dd2-7fba-4c83-88a5-af809f6a1945-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2f573dd2-7fba-4c83-88a5-af809f6a1945" (UID: "2f573dd2-7fba-4c83-88a5-af809f6a1945"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.443977 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2f573dd2-7fba-4c83-88a5-af809f6a1945-kube-api-access-4h4s7" (OuterVolumeSpecName: "kube-api-access-4h4s7") pod "2f573dd2-7fba-4c83-88a5-af809f6a1945" (UID: "2f573dd2-7fba-4c83-88a5-af809f6a1945"). InnerVolumeSpecName "kube-api-access-4h4s7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.542244 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2f573dd2-7fba-4c83-88a5-af809f6a1945-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.542282 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4h4s7\" (UniqueName: \"kubernetes.io/projected/2f573dd2-7fba-4c83-88a5-af809f6a1945-kube-api-access-4h4s7\") on node \"crc\" DevicePath \"\"" Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.993643 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.993642 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-p7dzq" event={"ID":"2f573dd2-7fba-4c83-88a5-af809f6a1945","Type":"ContainerDied","Data":"1ba78f3df1862bae1501716af15bd4e0000e0e65a66579a475b6b1d81f0e3355"} Nov 25 10:51:38 crc kubenswrapper[4702]: I1125 10:51:38.994071 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="1ba78f3df1862bae1501716af15bd4e0000e0e65a66579a475b6b1d81f0e3355" Nov 25 10:51:39 crc kubenswrapper[4702]: I1125 10:51:39.252667 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:39 crc kubenswrapper[4702]: I1125 10:51:39.352507 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-operator-scripts\") pod \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\" (UID: \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\") " Nov 25 10:51:39 crc kubenswrapper[4702]: I1125 10:51:39.352629 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h86lv\" (UniqueName: \"kubernetes.io/projected/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-kube-api-access-h86lv\") pod \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\" (UID: \"359938d3-a87f-48ab-98dc-f2c9ca99f6ee\") " Nov 25 10:51:39 crc kubenswrapper[4702]: I1125 10:51:39.353949 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "359938d3-a87f-48ab-98dc-f2c9ca99f6ee" (UID: "359938d3-a87f-48ab-98dc-f2c9ca99f6ee"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:51:39 crc kubenswrapper[4702]: I1125 10:51:39.357152 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-kube-api-access-h86lv" (OuterVolumeSpecName: "kube-api-access-h86lv") pod "359938d3-a87f-48ab-98dc-f2c9ca99f6ee" (UID: "359938d3-a87f-48ab-98dc-f2c9ca99f6ee"). InnerVolumeSpecName "kube-api-access-h86lv". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:51:39 crc kubenswrapper[4702]: I1125 10:51:39.453860 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:51:39 crc kubenswrapper[4702]: I1125 10:51:39.453908 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h86lv\" (UniqueName: \"kubernetes.io/projected/359938d3-a87f-48ab-98dc-f2c9ca99f6ee-kube-api-access-h86lv\") on node \"crc\" DevicePath \"\"" Nov 25 10:51:40 crc kubenswrapper[4702]: I1125 10:51:40.000781 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" event={"ID":"359938d3-a87f-48ab-98dc-f2c9ca99f6ee","Type":"ContainerDied","Data":"3590a68408313cec5ad647875002828354f3c089f82bfa301d1fc6cfa380bed2"} Nov 25 10:51:40 crc kubenswrapper[4702]: I1125 10:51:40.000823 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3590a68408313cec5ad647875002828354f3c089f82bfa301d1fc6cfa380bed2" Nov 25 10:51:40 crc kubenswrapper[4702]: I1125 10:51:40.001715 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s" Nov 25 10:51:40 crc kubenswrapper[4702]: I1125 10:51:40.357025 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/rabbitmq-server-0" Nov 25 10:51:43 crc kubenswrapper[4702]: I1125 10:51:43.590500 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:51:43 crc kubenswrapper[4702]: I1125 10:51:43.590867 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.370599 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-f7hkk"] Nov 25 10:51:44 crc kubenswrapper[4702]: E1125 10:51:44.371203 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="359938d3-a87f-48ab-98dc-f2c9ca99f6ee" containerName="mariadb-account-create-update" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.371217 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="359938d3-a87f-48ab-98dc-f2c9ca99f6ee" containerName="mariadb-account-create-update" Nov 25 10:51:44 crc kubenswrapper[4702]: E1125 10:51:44.371243 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2f573dd2-7fba-4c83-88a5-af809f6a1945" containerName="mariadb-database-create" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.371250 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2f573dd2-7fba-4c83-88a5-af809f6a1945" containerName="mariadb-database-create" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.371354 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="359938d3-a87f-48ab-98dc-f2c9ca99f6ee" containerName="mariadb-account-create-update" Nov 25 10:51:44 crc 
kubenswrapper[4702]: I1125 10:51:44.371369 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="2f573dd2-7fba-4c83-88a5-af809f6a1945" containerName="mariadb-database-create" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.371811 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.373936 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.374295 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.374464 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-mcfxj" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.374788 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.385954 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-f7hkk"] Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.419371 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8498651e-f22c-40ea-80c4-8502b93e8a9c-config-data\") pod \"keystone-db-sync-f7hkk\" (UID: \"8498651e-f22c-40ea-80c4-8502b93e8a9c\") " pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.419427 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5t8cn\" (UniqueName: \"kubernetes.io/projected/8498651e-f22c-40ea-80c4-8502b93e8a9c-kube-api-access-5t8cn\") pod \"keystone-db-sync-f7hkk\" (UID: \"8498651e-f22c-40ea-80c4-8502b93e8a9c\") " pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.520879 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8498651e-f22c-40ea-80c4-8502b93e8a9c-config-data\") pod \"keystone-db-sync-f7hkk\" (UID: \"8498651e-f22c-40ea-80c4-8502b93e8a9c\") " pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.521016 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5t8cn\" (UniqueName: \"kubernetes.io/projected/8498651e-f22c-40ea-80c4-8502b93e8a9c-kube-api-access-5t8cn\") pod \"keystone-db-sync-f7hkk\" (UID: \"8498651e-f22c-40ea-80c4-8502b93e8a9c\") " pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.532007 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8498651e-f22c-40ea-80c4-8502b93e8a9c-config-data\") pod \"keystone-db-sync-f7hkk\" (UID: \"8498651e-f22c-40ea-80c4-8502b93e8a9c\") " pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.538459 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5t8cn\" (UniqueName: \"kubernetes.io/projected/8498651e-f22c-40ea-80c4-8502b93e8a9c-kube-api-access-5t8cn\") pod \"keystone-db-sync-f7hkk\" (UID: 
\"8498651e-f22c-40ea-80c4-8502b93e8a9c\") " pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:51:44 crc kubenswrapper[4702]: I1125 10:51:44.701322 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:51:45 crc kubenswrapper[4702]: I1125 10:51:45.103075 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-f7hkk"] Nov 25 10:51:46 crc kubenswrapper[4702]: I1125 10:51:46.045694 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" event={"ID":"8498651e-f22c-40ea-80c4-8502b93e8a9c","Type":"ContainerStarted","Data":"90ae8dd557a2c35ef890aca0c80d8620ec8bf3deb4a32f7ba8a7e6cd0b4befdc"} Nov 25 10:51:52 crc kubenswrapper[4702]: I1125 10:51:52.084289 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" event={"ID":"8498651e-f22c-40ea-80c4-8502b93e8a9c","Type":"ContainerStarted","Data":"7e1990770a2084e80800f57afb9fa8259aeff7c080de121e269a147b232930d2"} Nov 25 10:51:52 crc kubenswrapper[4702]: I1125 10:51:52.103731 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" podStartSLOduration=1.583674123 podStartE2EDuration="8.103707866s" podCreationTimestamp="2025-11-25 10:51:44 +0000 UTC" firstStartedPulling="2025-11-25 10:51:45.113066056 +0000 UTC m=+1202.479661745" lastFinishedPulling="2025-11-25 10:51:51.633099799 +0000 UTC m=+1208.999695488" observedRunningTime="2025-11-25 10:51:52.098176806 +0000 UTC m=+1209.464772485" watchObservedRunningTime="2025-11-25 10:51:52.103707866 +0000 UTC m=+1209.470303555" Nov 25 10:52:03 crc kubenswrapper[4702]: I1125 10:52:03.161424 4702 generic.go:334] "Generic (PLEG): container finished" podID="8498651e-f22c-40ea-80c4-8502b93e8a9c" containerID="7e1990770a2084e80800f57afb9fa8259aeff7c080de121e269a147b232930d2" exitCode=0 Nov 25 10:52:03 crc kubenswrapper[4702]: I1125 10:52:03.161514 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" event={"ID":"8498651e-f22c-40ea-80c4-8502b93e8a9c","Type":"ContainerDied","Data":"7e1990770a2084e80800f57afb9fa8259aeff7c080de121e269a147b232930d2"} Nov 25 10:52:04 crc kubenswrapper[4702]: I1125 10:52:04.440077 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:52:04 crc kubenswrapper[4702]: I1125 10:52:04.493603 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8498651e-f22c-40ea-80c4-8502b93e8a9c-config-data\") pod \"8498651e-f22c-40ea-80c4-8502b93e8a9c\" (UID: \"8498651e-f22c-40ea-80c4-8502b93e8a9c\") " Nov 25 10:52:04 crc kubenswrapper[4702]: I1125 10:52:04.493669 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5t8cn\" (UniqueName: \"kubernetes.io/projected/8498651e-f22c-40ea-80c4-8502b93e8a9c-kube-api-access-5t8cn\") pod \"8498651e-f22c-40ea-80c4-8502b93e8a9c\" (UID: \"8498651e-f22c-40ea-80c4-8502b93e8a9c\") " Nov 25 10:52:04 crc kubenswrapper[4702]: I1125 10:52:04.500271 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8498651e-f22c-40ea-80c4-8502b93e8a9c-kube-api-access-5t8cn" (OuterVolumeSpecName: "kube-api-access-5t8cn") pod "8498651e-f22c-40ea-80c4-8502b93e8a9c" (UID: "8498651e-f22c-40ea-80c4-8502b93e8a9c"). InnerVolumeSpecName "kube-api-access-5t8cn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:52:04 crc kubenswrapper[4702]: I1125 10:52:04.531557 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8498651e-f22c-40ea-80c4-8502b93e8a9c-config-data" (OuterVolumeSpecName: "config-data") pod "8498651e-f22c-40ea-80c4-8502b93e8a9c" (UID: "8498651e-f22c-40ea-80c4-8502b93e8a9c"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:52:04 crc kubenswrapper[4702]: I1125 10:52:04.595298 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8498651e-f22c-40ea-80c4-8502b93e8a9c-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:04 crc kubenswrapper[4702]: I1125 10:52:04.595336 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5t8cn\" (UniqueName: \"kubernetes.io/projected/8498651e-f22c-40ea-80c4-8502b93e8a9c-kube-api-access-5t8cn\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.176786 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" event={"ID":"8498651e-f22c-40ea-80c4-8502b93e8a9c","Type":"ContainerDied","Data":"90ae8dd557a2c35ef890aca0c80d8620ec8bf3deb4a32f7ba8a7e6cd0b4befdc"} Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.177221 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="90ae8dd557a2c35ef890aca0c80d8620ec8bf3deb4a32f7ba8a7e6cd0b4befdc" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.177052 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-f7hkk" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.389240 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-75hqj"] Nov 25 10:52:05 crc kubenswrapper[4702]: E1125 10:52:05.389545 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8498651e-f22c-40ea-80c4-8502b93e8a9c" containerName="keystone-db-sync" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.389563 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="8498651e-f22c-40ea-80c4-8502b93e8a9c" containerName="keystone-db-sync" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.389701 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="8498651e-f22c-40ea-80c4-8502b93e8a9c" containerName="keystone-db-sync" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.390239 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.395285 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.395461 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.395536 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-mcfxj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.397377 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.397797 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.409951 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-config-data\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.410001 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-fernet-keys\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.410054 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-credential-keys\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.410078 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-scripts\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: 
I1125 10:52:05.410232 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5pdp7\" (UniqueName: \"kubernetes.io/projected/4eb643af-1421-4dbb-9935-d6ef47da1571-kube-api-access-5pdp7\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.411727 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-75hqj"] Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.511454 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-credential-keys\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.511514 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-scripts\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.511592 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5pdp7\" (UniqueName: \"kubernetes.io/projected/4eb643af-1421-4dbb-9935-d6ef47da1571-kube-api-access-5pdp7\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.511624 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-config-data\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.511651 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-fernet-keys\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.520007 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-scripts\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.520180 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-fernet-keys\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.520262 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-credential-keys\") pod \"keystone-bootstrap-75hqj\" (UID: 
\"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.520491 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-config-data\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.528495 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5pdp7\" (UniqueName: \"kubernetes.io/projected/4eb643af-1421-4dbb-9935-d6ef47da1571-kube-api-access-5pdp7\") pod \"keystone-bootstrap-75hqj\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") " pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:05 crc kubenswrapper[4702]: I1125 10:52:05.704732 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" Nov 25 10:52:06 crc kubenswrapper[4702]: I1125 10:52:06.109345 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-75hqj"] Nov 25 10:52:06 crc kubenswrapper[4702]: I1125 10:52:06.184111 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" event={"ID":"4eb643af-1421-4dbb-9935-d6ef47da1571","Type":"ContainerStarted","Data":"25be6299a95d6ea7ec1ba02fb6458a366b2df89d5e4c4f0e9a2899b3a7268713"} Nov 25 10:52:07 crc kubenswrapper[4702]: I1125 10:52:07.191877 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" event={"ID":"4eb643af-1421-4dbb-9935-d6ef47da1571","Type":"ContainerStarted","Data":"1145f142980a50a9decfeef6a1c8d3089b6091c4973debffcd495e71c2f73ec1"} Nov 25 10:52:07 crc kubenswrapper[4702]: I1125 10:52:07.210703 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" podStartSLOduration=2.210685741 podStartE2EDuration="2.210685741s" podCreationTimestamp="2025-11-25 10:52:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:52:07.20685119 +0000 UTC m=+1224.573446889" watchObservedRunningTime="2025-11-25 10:52:07.210685741 +0000 UTC m=+1224.577281430" Nov 25 10:52:10 crc kubenswrapper[4702]: I1125 10:52:10.211047 4702 generic.go:334] "Generic (PLEG): container finished" podID="4eb643af-1421-4dbb-9935-d6ef47da1571" containerID="1145f142980a50a9decfeef6a1c8d3089b6091c4973debffcd495e71c2f73ec1" exitCode=0 Nov 25 10:52:10 crc kubenswrapper[4702]: I1125 10:52:10.211173 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" event={"ID":"4eb643af-1421-4dbb-9935-d6ef47da1571","Type":"ContainerDied","Data":"1145f142980a50a9decfeef6a1c8d3089b6091c4973debffcd495e71c2f73ec1"} Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.506632 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj"
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.699646 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-credential-keys\") pod \"4eb643af-1421-4dbb-9935-d6ef47da1571\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") "
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.699717 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-config-data\") pod \"4eb643af-1421-4dbb-9935-d6ef47da1571\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") "
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.699756 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-scripts\") pod \"4eb643af-1421-4dbb-9935-d6ef47da1571\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") "
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.699780 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-fernet-keys\") pod \"4eb643af-1421-4dbb-9935-d6ef47da1571\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") "
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.700801 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5pdp7\" (UniqueName: \"kubernetes.io/projected/4eb643af-1421-4dbb-9935-d6ef47da1571-kube-api-access-5pdp7\") pod \"4eb643af-1421-4dbb-9935-d6ef47da1571\" (UID: \"4eb643af-1421-4dbb-9935-d6ef47da1571\") "
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.707625 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-scripts" (OuterVolumeSpecName: "scripts") pod "4eb643af-1421-4dbb-9935-d6ef47da1571" (UID: "4eb643af-1421-4dbb-9935-d6ef47da1571"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.707762 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "4eb643af-1421-4dbb-9935-d6ef47da1571" (UID: "4eb643af-1421-4dbb-9935-d6ef47da1571"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.711305 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eb643af-1421-4dbb-9935-d6ef47da1571-kube-api-access-5pdp7" (OuterVolumeSpecName: "kube-api-access-5pdp7") pod "4eb643af-1421-4dbb-9935-d6ef47da1571" (UID: "4eb643af-1421-4dbb-9935-d6ef47da1571"). InnerVolumeSpecName "kube-api-access-5pdp7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.713095 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "4eb643af-1421-4dbb-9935-d6ef47da1571" (UID: "4eb643af-1421-4dbb-9935-d6ef47da1571"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.721310 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-config-data" (OuterVolumeSpecName: "config-data") pod "4eb643af-1421-4dbb-9935-d6ef47da1571" (UID: "4eb643af-1421-4dbb-9935-d6ef47da1571"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.803411 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5pdp7\" (UniqueName: \"kubernetes.io/projected/4eb643af-1421-4dbb-9935-d6ef47da1571-kube-api-access-5pdp7\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.803447 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-credential-keys\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.803457 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-config-data\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.803465 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:11 crc kubenswrapper[4702]: I1125 10:52:11.803474 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/4eb643af-1421-4dbb-9935-d6ef47da1571-fernet-keys\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.225073 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj" event={"ID":"4eb643af-1421-4dbb-9935-d6ef47da1571","Type":"ContainerDied","Data":"25be6299a95d6ea7ec1ba02fb6458a366b2df89d5e4c4f0e9a2899b3a7268713"}
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.225117 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="25be6299a95d6ea7ec1ba02fb6458a366b2df89d5e4c4f0e9a2899b3a7268713"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.225143 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-75hqj"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.316267 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"]
Nov 25 10:52:12 crc kubenswrapper[4702]: E1125 10:52:12.316737 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eb643af-1421-4dbb-9935-d6ef47da1571" containerName="keystone-bootstrap"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.316766 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eb643af-1421-4dbb-9935-d6ef47da1571" containerName="keystone-bootstrap"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.317142 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eb643af-1421-4dbb-9935-d6ef47da1571" containerName="keystone-bootstrap"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.317889 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.326188 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"]
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.361174 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.361307 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.363591 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.364273 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-mcfxj"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.411839 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7gm6x\" (UniqueName: \"kubernetes.io/projected/28c759fb-ef33-4821-a1b1-499f7fecf939-kube-api-access-7gm6x\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.411917 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-scripts\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.411956 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-fernet-keys\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.411982 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-config-data\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.412013 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-credential-keys\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.513576 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7gm6x\" (UniqueName: \"kubernetes.io/projected/28c759fb-ef33-4821-a1b1-499f7fecf939-kube-api-access-7gm6x\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.513649 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-scripts\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.513681 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-fernet-keys\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.513708 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-config-data\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.513729 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-credential-keys\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.520609 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-fernet-keys\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.520668 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-config-data\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.527446 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-scripts\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.527861 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-credential-keys\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.538687 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7gm6x\" (UniqueName: \"kubernetes.io/projected/28c759fb-ef33-4821-a1b1-499f7fecf939-kube-api-access-7gm6x\") pod \"keystone-6c9c98f9b6-b4nnx\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:12 crc kubenswrapper[4702]: I1125 10:52:12.688400 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:13 crc kubenswrapper[4702]: I1125 10:52:13.101738 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"]
Nov 25 10:52:13 crc kubenswrapper[4702]: I1125 10:52:13.234124 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx" event={"ID":"28c759fb-ef33-4821-a1b1-499f7fecf939","Type":"ContainerStarted","Data":"d24a573ce5746d68e951fd99aa5207ee3a4faa28de745deaae2ab694d3d5db60"}
Nov 25 10:52:13 crc kubenswrapper[4702]: I1125 10:52:13.591460 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 10:52:13 crc kubenswrapper[4702]: I1125 10:52:13.591517 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 10:52:13 crc kubenswrapper[4702]: I1125 10:52:13.591560 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h"
Nov 25 10:52:13 crc kubenswrapper[4702]: I1125 10:52:13.592133 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"c995a3a58802015484aaf059ef2d7a1f54e8b9c0222aaf2fd6574984d2674473"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 25 10:52:13 crc kubenswrapper[4702]: I1125 10:52:13.592177 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://c995a3a58802015484aaf059ef2d7a1f54e8b9c0222aaf2fd6574984d2674473" gracePeriod=600
Nov 25 10:52:14 crc kubenswrapper[4702]: I1125 10:52:14.245138 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="c995a3a58802015484aaf059ef2d7a1f54e8b9c0222aaf2fd6574984d2674473" exitCode=0
Nov 25 10:52:14 crc kubenswrapper[4702]: I1125 10:52:14.245803 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"c995a3a58802015484aaf059ef2d7a1f54e8b9c0222aaf2fd6574984d2674473"}
Nov 25 10:52:14 crc kubenswrapper[4702]: I1125 10:52:14.245919 4702 scope.go:117] "RemoveContainer" containerID="3dfc5653761b32b3ec56bc025998710cbd6ef0729baccffe43614c093e896dd9"
Nov 25 10:52:14 crc kubenswrapper[4702]: I1125 10:52:14.249124 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx" event={"ID":"28c759fb-ef33-4821-a1b1-499f7fecf939","Type":"ContainerStarted","Data":"d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab"}
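The machine-config-daemon entries just above show the kubelet's liveness handling end to end: an HTTP GET against 127.0.0.1:8798/health is refused, the probe is recorded as failed, and the container is killed with a grace period so it can be restarted. A minimal Go sketch of that pattern follows; it is illustrative only, not the kubelet's actual code, and the killContainer helper is a hypothetical stand-in for the CRI call.

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probe performs one HTTP liveness check, mirroring the GET the log
// reports ("Get http://127.0.0.1:8798/health ... connection refused").
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // a refused connection surfaces here
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unhealthy status %d", resp.StatusCode)
	}
	return nil
}

// killContainer is a hypothetical stand-in for the runtime call logged as
// "Killing container with a grace period ... gracePeriod=600".
func killContainer(id string, gracePeriod time.Duration) {
	fmt.Printf("killing %s with grace period %s\n", id, gracePeriod)
}

func main() {
	if err := probe("http://127.0.0.1:8798/health"); err != nil {
		fmt.Println("liveness probe failed:", err)
		killContainer("c995a3a58802015484aaf059ef2d7a1f54e8b9c0222aaf2fd6574984d2674473", 600*time.Second)
	}
}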
probe="readiness" status="" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx" Nov 25 10:52:14 crc kubenswrapper[4702]: I1125 10:52:14.282459 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx" podStartSLOduration=2.282432948 podStartE2EDuration="2.282432948s" podCreationTimestamp="2025-11-25 10:52:12 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:52:14.273062477 +0000 UTC m=+1231.639658186" watchObservedRunningTime="2025-11-25 10:52:14.282432948 +0000 UTC m=+1231.649028637" Nov 25 10:52:15 crc kubenswrapper[4702]: I1125 10:52:15.258141 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"272d347b9d4642895dedc11ba1aec7becf7520474f3145b69f14b52ec045a606"} Nov 25 10:52:44 crc kubenswrapper[4702]: I1125 10:52:44.236932 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx" Nov 25 10:52:45 crc kubenswrapper[4702]: E1125 10:52:45.336304 4702 log.go:32] "Failed when writing line to log file" err="http2: stream closed" path="/var/log/pods/keystone-kuttl-tests_keystone-6c9c98f9b6-b4nnx_28c759fb-ef33-4821-a1b1-499f7fecf939/keystone-api/0.log" line={} Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.650229 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-7568879df-66pjb"] Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.651066 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.659464 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7568879df-66pjb"] Nov 25 10:52:45 crc kubenswrapper[4702]: E1125 10:52:45.734611 4702 log.go:32] "Failed when writing line to log file" err="http2: stream closed" path="/var/log/pods/keystone-kuttl-tests_keystone-6c9c98f9b6-b4nnx_28c759fb-ef33-4821-a1b1-499f7fecf939/keystone-api/0.log" line={} Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.807713 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.807774 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.807810 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-587jk\" (UniqueName: \"kubernetes.io/projected/c2167dee-fe2a-4381-964a-301151108e3b-kube-api-access-587jk\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 
10:52:45.807840 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.807930 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.909632 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.909700 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.909744 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.909795 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-587jk\" (UniqueName: \"kubernetes.io/projected/c2167dee-fe2a-4381-964a-301151108e3b-kube-api-access-587jk\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.909831 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.916036 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.916508 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.916582 4702 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.921713 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.937351 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-587jk\" (UniqueName: \"kubernetes.io/projected/c2167dee-fe2a-4381-964a-301151108e3b-kube-api-access-587jk\") pod \"keystone-7568879df-66pjb\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") " pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:45 crc kubenswrapper[4702]: I1125 10:52:45.968487 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:46 crc kubenswrapper[4702]: I1125 10:52:46.169413 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7568879df-66pjb"] Nov 25 10:52:46 crc kubenswrapper[4702]: I1125 10:52:46.490945 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" event={"ID":"c2167dee-fe2a-4381-964a-301151108e3b","Type":"ContainerStarted","Data":"dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1"} Nov 25 10:52:46 crc kubenswrapper[4702]: I1125 10:52:46.491523 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" Nov 25 10:52:46 crc kubenswrapper[4702]: I1125 10:52:46.491537 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" event={"ID":"c2167dee-fe2a-4381-964a-301151108e3b","Type":"ContainerStarted","Data":"f345cafe7a829f8f8bfc28a2e999955a384b461fe1b239eb9649d3bf0c9dd756"} Nov 25 10:52:46 crc kubenswrapper[4702]: I1125 10:52:46.511411 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" podStartSLOduration=1.5113809169999999 podStartE2EDuration="1.511380917s" podCreationTimestamp="2025-11-25 10:52:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:52:46.507147855 +0000 UTC m=+1263.873743544" watchObservedRunningTime="2025-11-25 10:52:46.511380917 +0000 UTC m=+1263.877976606" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.111816 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-75hqj"] Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.118654 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-f7hkk"] Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.128635 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-f7hkk"] Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.137293 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-75hqj"] Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.145035 4702 
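The pod_startup_latency_tracker entries above record podStartE2EDuration as nothing more than the observed-running timestamp minus the pod creation timestamp (here 10:52:46.511380917 minus 10:52:45 gives the logged 1.511380917s for keystone-7568879df-66pjb). A sketch of that arithmetic with Go's time package, using the timestamps copied from the entry; the variable names are mine, not the kubelet's:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Layout matching the log's "2025-11-25 10:52:45 +0000 UTC" form
	// (Go's default time.Time.String() format).
	const layout = "2006-01-02 15:04:05.999999999 -0700 MST"
	created, err := time.Parse(layout, "2025-11-25 10:52:45 +0000 UTC")
	if err != nil {
		panic(err)
	}
	observed, err := time.Parse(layout, "2025-11-25 10:52:46.511380917 +0000 UTC")
	if err != nil {
		panic(err)
	}
	// Prints "1.511380917s", matching the logged podStartE2EDuration.
	fmt.Println(observed.Sub(created))
}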
kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7568879df-66pjb"] Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.150011 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"] Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.150281 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx" podUID="28c759fb-ef33-4821-a1b1-499f7fecf939" containerName="keystone-api" containerID="cri-o://d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab" gracePeriod=30 Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.187073 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone7756-account-delete-jnf4z"] Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.187971 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.200232 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone7756-account-delete-jnf4z"] Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.334084 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c899f577-7826-4a51-9062-8753029f834d-operator-scripts\") pod \"keystone7756-account-delete-jnf4z\" (UID: \"c899f577-7826-4a51-9062-8753029f834d\") " pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.334168 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zn8fr\" (UniqueName: \"kubernetes.io/projected/c899f577-7826-4a51-9062-8753029f834d-kube-api-access-zn8fr\") pod \"keystone7756-account-delete-jnf4z\" (UID: \"c899f577-7826-4a51-9062-8753029f834d\") " pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.411400 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eb643af-1421-4dbb-9935-d6ef47da1571" path="/var/lib/kubelet/pods/4eb643af-1421-4dbb-9935-d6ef47da1571/volumes" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.412224 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8498651e-f22c-40ea-80c4-8502b93e8a9c" path="/var/lib/kubelet/pods/8498651e-f22c-40ea-80c4-8502b93e8a9c/volumes" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.435564 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c899f577-7826-4a51-9062-8753029f834d-operator-scripts\") pod \"keystone7756-account-delete-jnf4z\" (UID: \"c899f577-7826-4a51-9062-8753029f834d\") " pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.435634 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zn8fr\" (UniqueName: \"kubernetes.io/projected/c899f577-7826-4a51-9062-8753029f834d-kube-api-access-zn8fr\") pod \"keystone7756-account-delete-jnf4z\" (UID: \"c899f577-7826-4a51-9062-8753029f834d\") " pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.436452 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c899f577-7826-4a51-9062-8753029f834d-operator-scripts\") pod \"keystone7756-account-delete-jnf4z\" (UID: \"c899f577-7826-4a51-9062-8753029f834d\") " pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.453556 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zn8fr\" (UniqueName: \"kubernetes.io/projected/c899f577-7826-4a51-9062-8753029f834d-kube-api-access-zn8fr\") pod \"keystone7756-account-delete-jnf4z\" (UID: \"c899f577-7826-4a51-9062-8753029f834d\") " pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.496925 4702 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="keystone-kuttl-tests/keystone-7568879df-66pjb" secret="" err="secret \"keystone-keystone-dockercfg-mcfxj\" not found" Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.509576 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" Nov 25 10:52:47 crc kubenswrapper[4702]: E1125 10:52:47.638596 4702 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone-config-data: secret "keystone-config-data" not found Nov 25 10:52:47 crc kubenswrapper[4702]: E1125 10:52:47.639231 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:48.139201287 +0000 UTC m=+1265.505796976 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data") pod "keystone-7568879df-66pjb" (UID: "c2167dee-fe2a-4381-964a-301151108e3b") : secret "keystone-config-data" not found Nov 25 10:52:47 crc kubenswrapper[4702]: E1125 10:52:47.639295 4702 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone: secret "keystone" not found Nov 25 10:52:47 crc kubenswrapper[4702]: E1125 10:52:47.639352 4702 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone-scripts: secret "keystone-scripts" not found Nov 25 10:52:47 crc kubenswrapper[4702]: E1125 10:52:47.639395 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:48.139367901 +0000 UTC m=+1265.505963590 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "credential-keys" (UniqueName: "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys") pod "keystone-7568879df-66pjb" (UID: "c2167dee-fe2a-4381-964a-301151108e3b") : secret "keystone" not found Nov 25 10:52:47 crc kubenswrapper[4702]: E1125 10:52:47.639450 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:48.139422993 +0000 UTC m=+1265.506018682 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts") pod "keystone-7568879df-66pjb" (UID: "c2167dee-fe2a-4381-964a-301151108e3b") : secret "keystone-scripts" not found Nov 25 10:52:47 crc kubenswrapper[4702]: E1125 10:52:47.639497 4702 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone: secret "keystone" not found Nov 25 10:52:47 crc kubenswrapper[4702]: E1125 10:52:47.639518 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:48.139511706 +0000 UTC m=+1265.506107395 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "fernet-keys" (UniqueName: "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys") pod "keystone-7568879df-66pjb" (UID: "c2167dee-fe2a-4381-964a-301151108e3b") : secret "keystone" not found Nov 25 10:52:47 crc kubenswrapper[4702]: I1125 10:52:47.727481 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone7756-account-delete-jnf4z"] Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145653 4702 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone-scripts: secret "keystone-scripts" not found Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145726 4702 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone: secret "keystone" not found Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145763 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:49.145737636 +0000 UTC m=+1266.512333325 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "scripts" (UniqueName: "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts") pod "keystone-7568879df-66pjb" (UID: "c2167dee-fe2a-4381-964a-301151108e3b") : secret "keystone-scripts" not found Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145827 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:49.145796018 +0000 UTC m=+1266.512391727 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "credential-keys" (UniqueName: "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys") pod "keystone-7568879df-66pjb" (UID: "c2167dee-fe2a-4381-964a-301151108e3b") : secret "keystone" not found Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145726 4702 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone-config-data: secret "keystone-config-data" not found Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145867 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:49.14585911 +0000 UTC m=+1266.512454879 (durationBeforeRetry 1s). 
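The nestedpendingoperations entries around this point show the volume manager's retry backoff doubling: the first failed MountVolume.SetUp is retried after 500ms, the next after 1s, and so on while the missing secret stays absent. A small Go sketch of that doubling-with-ceiling pattern follows; the 500ms base matches what the log shows, while the 2-minute ceiling here is an illustrative value, not one read from kubelet's configuration.

package main

import (
	"fmt"
	"time"
)

// nextBackoff doubles the wait after each consecutive failure, starting
// at base and never exceeding limit -- the pattern behind the log's
// "durationBeforeRetry 500ms" followed by "durationBeforeRetry 1s".
func nextBackoff(prev, base, limit time.Duration) time.Duration {
	if prev == 0 {
		return base
	}
	next := prev * 2
	if next > limit {
		return limit
	}
	return next
}

func main() {
	var d time.Duration
	base, limit := 500*time.Millisecond, 2*time.Minute // illustrative values
	for i := 0; i < 5; i++ {
		d = nextBackoff(d, base, limit)
		fmt.Println("retry after", d) // 500ms, 1s, 2s, 4s, 8s
	}
}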
Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145867 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:49.14585911 +0000 UTC m=+1266.512454879 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "config-data" (UniqueName: "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data") pod "keystone-7568879df-66pjb" (UID: "c2167dee-fe2a-4381-964a-301151108e3b") : secret "keystone-config-data" not found
Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145653 4702 secret.go:188] Couldn't get secret keystone-kuttl-tests/keystone: secret "keystone" not found
Nov 25 10:52:48 crc kubenswrapper[4702]: E1125 10:52:48.145915 4702 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys podName:c2167dee-fe2a-4381-964a-301151108e3b nodeName:}" failed. No retries permitted until 2025-11-25 10:52:49.145889941 +0000 UTC m=+1266.512485700 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "fernet-keys" (UniqueName: "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys") pod "keystone-7568879df-66pjb" (UID: "c2167dee-fe2a-4381-964a-301151108e3b") : secret "keystone" not found
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.504646 4702 generic.go:334] "Generic (PLEG): container finished" podID="c899f577-7826-4a51-9062-8753029f834d" containerID="1616714aea9f92174fb037e2310c033518f64aa92b0beed6a29bc963c89a81e5" exitCode=0
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.504740 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" event={"ID":"c899f577-7826-4a51-9062-8753029f834d","Type":"ContainerDied","Data":"1616714aea9f92174fb037e2310c033518f64aa92b0beed6a29bc963c89a81e5"}
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.504774 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" event={"ID":"c899f577-7826-4a51-9062-8753029f834d","Type":"ContainerStarted","Data":"319e95a3dfcc1967b96073f8e5e400429cc04fe9dbcd17067e34f79c5cf826e0"}
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.504991 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" podUID="c2167dee-fe2a-4381-964a-301151108e3b" containerName="keystone-api" containerID="cri-o://dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1" gracePeriod=30
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.875941 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7568879df-66pjb"
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.955452 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys\") pod \"c2167dee-fe2a-4381-964a-301151108e3b\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") "
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.955577 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys\") pod \"c2167dee-fe2a-4381-964a-301151108e3b\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") "
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.955630 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-587jk\" (UniqueName: \"kubernetes.io/projected/c2167dee-fe2a-4381-964a-301151108e3b-kube-api-access-587jk\") pod \"c2167dee-fe2a-4381-964a-301151108e3b\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") "
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.955694 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts\") pod \"c2167dee-fe2a-4381-964a-301151108e3b\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") "
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.955737 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data\") pod \"c2167dee-fe2a-4381-964a-301151108e3b\" (UID: \"c2167dee-fe2a-4381-964a-301151108e3b\") "
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.961395 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c2167dee-fe2a-4381-964a-301151108e3b-kube-api-access-587jk" (OuterVolumeSpecName: "kube-api-access-587jk") pod "c2167dee-fe2a-4381-964a-301151108e3b" (UID: "c2167dee-fe2a-4381-964a-301151108e3b"). InnerVolumeSpecName "kube-api-access-587jk". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.961546 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c2167dee-fe2a-4381-964a-301151108e3b" (UID: "c2167dee-fe2a-4381-964a-301151108e3b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.967804 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts" (OuterVolumeSpecName: "scripts") pod "c2167dee-fe2a-4381-964a-301151108e3b" (UID: "c2167dee-fe2a-4381-964a-301151108e3b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.968040 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c2167dee-fe2a-4381-964a-301151108e3b" (UID: "c2167dee-fe2a-4381-964a-301151108e3b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:52:48 crc kubenswrapper[4702]: I1125 10:52:48.984252 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data" (OuterVolumeSpecName: "config-data") pod "c2167dee-fe2a-4381-964a-301151108e3b" (UID: "c2167dee-fe2a-4381-964a-301151108e3b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.058140 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-credential-keys\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.058195 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-fernet-keys\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.058209 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-587jk\" (UniqueName: \"kubernetes.io/projected/c2167dee-fe2a-4381-964a-301151108e3b-kube-api-access-587jk\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.058222 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.058234 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c2167dee-fe2a-4381-964a-301151108e3b-config-data\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.514276 4702 generic.go:334] "Generic (PLEG): container finished" podID="c2167dee-fe2a-4381-964a-301151108e3b" containerID="dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1" exitCode=0
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.514341 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7568879df-66pjb"
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.514331 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" event={"ID":"c2167dee-fe2a-4381-964a-301151108e3b","Type":"ContainerDied","Data":"dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1"}
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.514741 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7568879df-66pjb" event={"ID":"c2167dee-fe2a-4381-964a-301151108e3b","Type":"ContainerDied","Data":"f345cafe7a829f8f8bfc28a2e999955a384b461fe1b239eb9649d3bf0c9dd756"}
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.514772 4702 scope.go:117] "RemoveContainer" containerID="dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1"
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.543958 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7568879df-66pjb"]
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.545202 4702 scope.go:117] "RemoveContainer" containerID="dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1"
Nov 25 10:52:49 crc kubenswrapper[4702]: E1125 10:52:49.546029 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1\": container with ID starting with dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1 not found: ID does not exist" containerID="dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1"
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.546069 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1"} err="failed to get container status \"dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1\": rpc error: code = NotFound desc = could not find container \"dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1\": container with ID starting with dd93dae9078b5e3306e64b6b149730a62ebcd318e38f0ebdc3af1aa8f5f45bf1 not found: ID does not exist"
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.552670 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-7568879df-66pjb"]
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.787863 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z"
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.867831 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zn8fr\" (UniqueName: \"kubernetes.io/projected/c899f577-7826-4a51-9062-8753029f834d-kube-api-access-zn8fr\") pod \"c899f577-7826-4a51-9062-8753029f834d\" (UID: \"c899f577-7826-4a51-9062-8753029f834d\") "
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.868038 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c899f577-7826-4a51-9062-8753029f834d-operator-scripts\") pod \"c899f577-7826-4a51-9062-8753029f834d\" (UID: \"c899f577-7826-4a51-9062-8753029f834d\") "
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.868879 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c899f577-7826-4a51-9062-8753029f834d-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c899f577-7826-4a51-9062-8753029f834d" (UID: "c899f577-7826-4a51-9062-8753029f834d"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.873209 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c899f577-7826-4a51-9062-8753029f834d-kube-api-access-zn8fr" (OuterVolumeSpecName: "kube-api-access-zn8fr") pod "c899f577-7826-4a51-9062-8753029f834d" (UID: "c899f577-7826-4a51-9062-8753029f834d"). InnerVolumeSpecName "kube-api-access-zn8fr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.970201 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zn8fr\" (UniqueName: \"kubernetes.io/projected/c899f577-7826-4a51-9062-8753029f834d-kube-api-access-zn8fr\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:49 crc kubenswrapper[4702]: I1125 10:52:49.971007 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c899f577-7826-4a51-9062-8753029f834d-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.511921 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.529228 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z" event={"ID":"c899f577-7826-4a51-9062-8753029f834d","Type":"ContainerDied","Data":"319e95a3dfcc1967b96073f8e5e400429cc04fe9dbcd17067e34f79c5cf826e0"}
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.529278 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="319e95a3dfcc1967b96073f8e5e400429cc04fe9dbcd17067e34f79c5cf826e0"
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.529336 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone7756-account-delete-jnf4z"
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.535146 4702 generic.go:334] "Generic (PLEG): container finished" podID="28c759fb-ef33-4821-a1b1-499f7fecf939" containerID="d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab" exitCode=0
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.535201 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx" event={"ID":"28c759fb-ef33-4821-a1b1-499f7fecf939","Type":"ContainerDied","Data":"d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab"}
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.535231 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx" event={"ID":"28c759fb-ef33-4821-a1b1-499f7fecf939","Type":"ContainerDied","Data":"d24a573ce5746d68e951fd99aa5207ee3a4faa28de745deaae2ab694d3d5db60"}
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.535250 4702 scope.go:117] "RemoveContainer" containerID="d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab"
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.535345 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.561227 4702 scope.go:117] "RemoveContainer" containerID="d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab"
Nov 25 10:52:50 crc kubenswrapper[4702]: E1125 10:52:50.561567 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab\": container with ID starting with d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab not found: ID does not exist" containerID="d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab"
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.561595 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab"} err="failed to get container status \"d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab\": rpc error: code = NotFound desc = could not find container \"d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab\": container with ID starting with d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab not found: ID does not exist"
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.581478 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7gm6x\" (UniqueName: \"kubernetes.io/projected/28c759fb-ef33-4821-a1b1-499f7fecf939-kube-api-access-7gm6x\") pod \"28c759fb-ef33-4821-a1b1-499f7fecf939\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") "
Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.581633 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-credential-keys\") pod \"28c759fb-ef33-4821-a1b1-499f7fecf939\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") "
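Twice in the entries above, RemoveContainer races with the runtime's own cleanup: the follow-up ContainerStatus call returns a gRPC NotFound, which the kubelet logs but does not treat as fatal, since an already-removed container means the delete has effectively succeeded. A Go sketch of that idempotent-delete pattern follows; the sentinel error and the remove callback are hypothetical stand-ins for the CRI client, not kubelet code.

package main

import (
	"errors"
	"fmt"
)

// errNotFound stands in for the runtime's gRPC NotFound
// ("could not find container ... ID does not exist").
var errNotFound = errors.New("container not found")

// removeContainer treats NotFound as success: if the runtime already
// removed the container, there is nothing left to do (idempotent delete).
func removeContainer(id string, remove func(string) error) error {
	err := remove(id)
	if errors.Is(err, errNotFound) {
		fmt.Printf("container %s already gone; ignoring\n", id)
		return nil
	}
	return err
}

func main() {
	// Simulate the race: the runtime reports the container as missing.
	alreadyGone := func(id string) error {
		return fmt.Errorf("rpc error: %w", errNotFound)
	}
	if err := removeContainer("d1592f30e151c29f97ff0bb3d47793456013261ebebac8b52c281387400773ab", alreadyGone); err != nil {
		fmt.Println("unexpected:", err)
	}
}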
\"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-config-data\") pod \"28c759fb-ef33-4821-a1b1-499f7fecf939\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.581822 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-fernet-keys\") pod \"28c759fb-ef33-4821-a1b1-499f7fecf939\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.582013 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-scripts\") pod \"28c759fb-ef33-4821-a1b1-499f7fecf939\" (UID: \"28c759fb-ef33-4821-a1b1-499f7fecf939\") " Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.586042 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-scripts" (OuterVolumeSpecName: "scripts") pod "28c759fb-ef33-4821-a1b1-499f7fecf939" (UID: "28c759fb-ef33-4821-a1b1-499f7fecf939"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.586573 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "28c759fb-ef33-4821-a1b1-499f7fecf939" (UID: "28c759fb-ef33-4821-a1b1-499f7fecf939"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.587367 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28c759fb-ef33-4821-a1b1-499f7fecf939-kube-api-access-7gm6x" (OuterVolumeSpecName: "kube-api-access-7gm6x") pod "28c759fb-ef33-4821-a1b1-499f7fecf939" (UID: "28c759fb-ef33-4821-a1b1-499f7fecf939"). InnerVolumeSpecName "kube-api-access-7gm6x". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.591780 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "28c759fb-ef33-4821-a1b1-499f7fecf939" (UID: "28c759fb-ef33-4821-a1b1-499f7fecf939"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.602779 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-config-data" (OuterVolumeSpecName: "config-data") pod "28c759fb-ef33-4821-a1b1-499f7fecf939" (UID: "28c759fb-ef33-4821-a1b1-499f7fecf939"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.684361 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7gm6x\" (UniqueName: \"kubernetes.io/projected/28c759fb-ef33-4821-a1b1-499f7fecf939-kube-api-access-7gm6x\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.684423 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.684437 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.684447 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.684457 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/28c759fb-ef33-4821-a1b1-499f7fecf939-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.870081 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"] Nov 25 10:52:50 crc kubenswrapper[4702]: I1125 10:52:50.878556 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-6c9c98f9b6-b4nnx"] Nov 25 10:52:51 crc kubenswrapper[4702]: I1125 10:52:51.409915 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28c759fb-ef33-4821-a1b1-499f7fecf939" path="/var/lib/kubelet/pods/28c759fb-ef33-4821-a1b1-499f7fecf939/volumes" Nov 25 10:52:51 crc kubenswrapper[4702]: I1125 10:52:51.410808 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c2167dee-fe2a-4381-964a-301151108e3b" path="/var/lib/kubelet/pods/c2167dee-fe2a-4381-964a-301151108e3b/volumes" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.203748 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-p7dzq"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.210455 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-p7dzq"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.226682 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.232383 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone7756-account-delete-jnf4z"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.237721 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-7756-account-create-update-jdv6s"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.243142 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone7756-account-delete-jnf4z"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.310960 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-jwtgn"] Nov 25 10:52:52 crc kubenswrapper[4702]: E1125 10:52:52.311280 4702 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="28c759fb-ef33-4821-a1b1-499f7fecf939" containerName="keystone-api" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.311303 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="28c759fb-ef33-4821-a1b1-499f7fecf939" containerName="keystone-api" Nov 25 10:52:52 crc kubenswrapper[4702]: E1125 10:52:52.311329 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c899f577-7826-4a51-9062-8753029f834d" containerName="mariadb-account-delete" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.311336 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c899f577-7826-4a51-9062-8753029f834d" containerName="mariadb-account-delete" Nov 25 10:52:52 crc kubenswrapper[4702]: E1125 10:52:52.311354 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c2167dee-fe2a-4381-964a-301151108e3b" containerName="keystone-api" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.311364 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c2167dee-fe2a-4381-964a-301151108e3b" containerName="keystone-api" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.311499 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c899f577-7826-4a51-9062-8753029f834d" containerName="mariadb-account-delete" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.311531 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c2167dee-fe2a-4381-964a-301151108e3b" containerName="keystone-api" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.311545 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="28c759fb-ef33-4821-a1b1-499f7fecf939" containerName="keystone-api" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.312081 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.317775 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-jwtgn"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.408873 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjzzz\" (UniqueName: \"kubernetes.io/projected/30a2ef62-8e88-436b-9d79-8794b7e637fa-kube-api-access-qjzzz\") pod \"keystone-db-create-jwtgn\" (UID: \"30a2ef62-8e88-436b-9d79-8794b7e637fa\") " pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.409156 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30a2ef62-8e88-436b-9d79-8794b7e637fa-operator-scripts\") pod \"keystone-db-create-jwtgn\" (UID: \"30a2ef62-8e88-436b-9d79-8794b7e637fa\") " pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.419919 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.420720 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.423794 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.438178 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj"] Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.511363 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13830540-0ae9-4c80-b8b7-3d170f518a69-operator-scripts\") pod \"keystone-0d7e-account-create-update-xfmrj\" (UID: \"13830540-0ae9-4c80-b8b7-3d170f518a69\") " pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.511622 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5zng\" (UniqueName: \"kubernetes.io/projected/13830540-0ae9-4c80-b8b7-3d170f518a69-kube-api-access-t5zng\") pod \"keystone-0d7e-account-create-update-xfmrj\" (UID: \"13830540-0ae9-4c80-b8b7-3d170f518a69\") " pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.511757 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjzzz\" (UniqueName: \"kubernetes.io/projected/30a2ef62-8e88-436b-9d79-8794b7e637fa-kube-api-access-qjzzz\") pod \"keystone-db-create-jwtgn\" (UID: \"30a2ef62-8e88-436b-9d79-8794b7e637fa\") " pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.511795 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30a2ef62-8e88-436b-9d79-8794b7e637fa-operator-scripts\") pod \"keystone-db-create-jwtgn\" (UID: \"30a2ef62-8e88-436b-9d79-8794b7e637fa\") " pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.512632 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30a2ef62-8e88-436b-9d79-8794b7e637fa-operator-scripts\") pod \"keystone-db-create-jwtgn\" (UID: \"30a2ef62-8e88-436b-9d79-8794b7e637fa\") " pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.536247 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjzzz\" (UniqueName: \"kubernetes.io/projected/30a2ef62-8e88-436b-9d79-8794b7e637fa-kube-api-access-qjzzz\") pod \"keystone-db-create-jwtgn\" (UID: \"30a2ef62-8e88-436b-9d79-8794b7e637fa\") " pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.613965 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13830540-0ae9-4c80-b8b7-3d170f518a69-operator-scripts\") pod \"keystone-0d7e-account-create-update-xfmrj\" (UID: \"13830540-0ae9-4c80-b8b7-3d170f518a69\") " pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.614644 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/13830540-0ae9-4c80-b8b7-3d170f518a69-operator-scripts\") pod \"keystone-0d7e-account-create-update-xfmrj\" (UID: \"13830540-0ae9-4c80-b8b7-3d170f518a69\") " pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.614817 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5zng\" (UniqueName: \"kubernetes.io/projected/13830540-0ae9-4c80-b8b7-3d170f518a69-kube-api-access-t5zng\") pod \"keystone-0d7e-account-create-update-xfmrj\" (UID: \"13830540-0ae9-4c80-b8b7-3d170f518a69\") " pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.632919 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.633333 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5zng\" (UniqueName: \"kubernetes.io/projected/13830540-0ae9-4c80-b8b7-3d170f518a69-kube-api-access-t5zng\") pod \"keystone-0d7e-account-create-update-xfmrj\" (UID: \"13830540-0ae9-4c80-b8b7-3d170f518a69\") " pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.737382 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:52 crc kubenswrapper[4702]: I1125 10:52:52.862326 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-jwtgn"] Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.159383 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj"] Nov 25 10:52:53 crc kubenswrapper[4702]: W1125 10:52:53.163160 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod13830540_0ae9_4c80_b8b7_3d170f518a69.slice/crio-95cc1ef8f03821bb75a2d9d4e19a8a9cafe683eac79c04031aee21c74da1923e WatchSource:0}: Error finding container 95cc1ef8f03821bb75a2d9d4e19a8a9cafe683eac79c04031aee21c74da1923e: Status 404 returned error can't find the container with id 95cc1ef8f03821bb75a2d9d4e19a8a9cafe683eac79c04031aee21c74da1923e Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.414477 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2f573dd2-7fba-4c83-88a5-af809f6a1945" path="/var/lib/kubelet/pods/2f573dd2-7fba-4c83-88a5-af809f6a1945/volumes" Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.415755 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="359938d3-a87f-48ab-98dc-f2c9ca99f6ee" path="/var/lib/kubelet/pods/359938d3-a87f-48ab-98dc-f2c9ca99f6ee/volumes" Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.416403 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c899f577-7826-4a51-9062-8753029f834d" path="/var/lib/kubelet/pods/c899f577-7826-4a51-9062-8753029f834d/volumes" Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.559832 4702 generic.go:334] "Generic (PLEG): container finished" podID="30a2ef62-8e88-436b-9d79-8794b7e637fa" containerID="280c65ba8ebe99528481bd89ba715c42c20e53181145a079e3dc4578b4273289" exitCode=0 Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.559929 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="keystone-kuttl-tests/keystone-db-create-jwtgn" event={"ID":"30a2ef62-8e88-436b-9d79-8794b7e637fa","Type":"ContainerDied","Data":"280c65ba8ebe99528481bd89ba715c42c20e53181145a079e3dc4578b4273289"} Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.559980 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-jwtgn" event={"ID":"30a2ef62-8e88-436b-9d79-8794b7e637fa","Type":"ContainerStarted","Data":"9b2cb6398779954d41fb8e201e3d9cfc5283db1aef4cb37accc3cba6b25632ad"} Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.562176 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" event={"ID":"13830540-0ae9-4c80-b8b7-3d170f518a69","Type":"ContainerStarted","Data":"bc75a92a5503a3e979a7efb806a25558412a7d8ea74dfb21f616f440706bd82a"} Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.562228 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" event={"ID":"13830540-0ae9-4c80-b8b7-3d170f518a69","Type":"ContainerStarted","Data":"95cc1ef8f03821bb75a2d9d4e19a8a9cafe683eac79c04031aee21c74da1923e"} Nov 25 10:52:53 crc kubenswrapper[4702]: I1125 10:52:53.588346 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" podStartSLOduration=1.588320961 podStartE2EDuration="1.588320961s" podCreationTimestamp="2025-11-25 10:52:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:52:53.587958481 +0000 UTC m=+1270.954554190" watchObservedRunningTime="2025-11-25 10:52:53.588320961 +0000 UTC m=+1270.954916650" Nov 25 10:52:54 crc kubenswrapper[4702]: I1125 10:52:54.806531 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:54 crc kubenswrapper[4702]: I1125 10:52:54.971084 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qjzzz\" (UniqueName: \"kubernetes.io/projected/30a2ef62-8e88-436b-9d79-8794b7e637fa-kube-api-access-qjzzz\") pod \"30a2ef62-8e88-436b-9d79-8794b7e637fa\" (UID: \"30a2ef62-8e88-436b-9d79-8794b7e637fa\") " Nov 25 10:52:54 crc kubenswrapper[4702]: I1125 10:52:54.971147 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30a2ef62-8e88-436b-9d79-8794b7e637fa-operator-scripts\") pod \"30a2ef62-8e88-436b-9d79-8794b7e637fa\" (UID: \"30a2ef62-8e88-436b-9d79-8794b7e637fa\") " Nov 25 10:52:54 crc kubenswrapper[4702]: I1125 10:52:54.971941 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/30a2ef62-8e88-436b-9d79-8794b7e637fa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "30a2ef62-8e88-436b-9d79-8794b7e637fa" (UID: "30a2ef62-8e88-436b-9d79-8794b7e637fa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:52:54 crc kubenswrapper[4702]: I1125 10:52:54.978363 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30a2ef62-8e88-436b-9d79-8794b7e637fa-kube-api-access-qjzzz" (OuterVolumeSpecName: "kube-api-access-qjzzz") pod "30a2ef62-8e88-436b-9d79-8794b7e637fa" (UID: "30a2ef62-8e88-436b-9d79-8794b7e637fa"). 
InnerVolumeSpecName "kube-api-access-qjzzz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:52:55 crc kubenswrapper[4702]: I1125 10:52:55.073392 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qjzzz\" (UniqueName: \"kubernetes.io/projected/30a2ef62-8e88-436b-9d79-8794b7e637fa-kube-api-access-qjzzz\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:55 crc kubenswrapper[4702]: I1125 10:52:55.073431 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/30a2ef62-8e88-436b-9d79-8794b7e637fa-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:55 crc kubenswrapper[4702]: I1125 10:52:55.579252 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-jwtgn" event={"ID":"30a2ef62-8e88-436b-9d79-8794b7e637fa","Type":"ContainerDied","Data":"9b2cb6398779954d41fb8e201e3d9cfc5283db1aef4cb37accc3cba6b25632ad"} Nov 25 10:52:55 crc kubenswrapper[4702]: I1125 10:52:55.579298 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9b2cb6398779954d41fb8e201e3d9cfc5283db1aef4cb37accc3cba6b25632ad" Nov 25 10:52:55 crc kubenswrapper[4702]: I1125 10:52:55.579767 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-jwtgn" Nov 25 10:52:55 crc kubenswrapper[4702]: I1125 10:52:55.580780 4702 generic.go:334] "Generic (PLEG): container finished" podID="13830540-0ae9-4c80-b8b7-3d170f518a69" containerID="bc75a92a5503a3e979a7efb806a25558412a7d8ea74dfb21f616f440706bd82a" exitCode=0 Nov 25 10:52:55 crc kubenswrapper[4702]: I1125 10:52:55.580827 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" event={"ID":"13830540-0ae9-4c80-b8b7-3d170f518a69","Type":"ContainerDied","Data":"bc75a92a5503a3e979a7efb806a25558412a7d8ea74dfb21f616f440706bd82a"} Nov 25 10:52:56 crc kubenswrapper[4702]: I1125 10:52:56.836375 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.004683 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13830540-0ae9-4c80-b8b7-3d170f518a69-operator-scripts\") pod \"13830540-0ae9-4c80-b8b7-3d170f518a69\" (UID: \"13830540-0ae9-4c80-b8b7-3d170f518a69\") " Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.004783 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5zng\" (UniqueName: \"kubernetes.io/projected/13830540-0ae9-4c80-b8b7-3d170f518a69-kube-api-access-t5zng\") pod \"13830540-0ae9-4c80-b8b7-3d170f518a69\" (UID: \"13830540-0ae9-4c80-b8b7-3d170f518a69\") " Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.005733 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/13830540-0ae9-4c80-b8b7-3d170f518a69-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "13830540-0ae9-4c80-b8b7-3d170f518a69" (UID: "13830540-0ae9-4c80-b8b7-3d170f518a69"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.009613 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/13830540-0ae9-4c80-b8b7-3d170f518a69-kube-api-access-t5zng" (OuterVolumeSpecName: "kube-api-access-t5zng") pod "13830540-0ae9-4c80-b8b7-3d170f518a69" (UID: "13830540-0ae9-4c80-b8b7-3d170f518a69"). InnerVolumeSpecName "kube-api-access-t5zng". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.106537 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/13830540-0ae9-4c80-b8b7-3d170f518a69-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.106581 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5zng\" (UniqueName: \"kubernetes.io/projected/13830540-0ae9-4c80-b8b7-3d170f518a69-kube-api-access-t5zng\") on node \"crc\" DevicePath \"\"" Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.595132 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" event={"ID":"13830540-0ae9-4c80-b8b7-3d170f518a69","Type":"ContainerDied","Data":"95cc1ef8f03821bb75a2d9d4e19a8a9cafe683eac79c04031aee21c74da1923e"} Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.595483 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="95cc1ef8f03821bb75a2d9d4e19a8a9cafe683eac79c04031aee21c74da1923e" Nov 25 10:52:57 crc kubenswrapper[4702]: I1125 10:52:57.595237 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.993344 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-g947l"] Nov 25 10:53:02 crc kubenswrapper[4702]: E1125 10:53:02.994224 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="13830540-0ae9-4c80-b8b7-3d170f518a69" containerName="mariadb-account-create-update" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.994241 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="13830540-0ae9-4c80-b8b7-3d170f518a69" containerName="mariadb-account-create-update" Nov 25 10:53:02 crc kubenswrapper[4702]: E1125 10:53:02.994256 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30a2ef62-8e88-436b-9d79-8794b7e637fa" containerName="mariadb-database-create" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.994265 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="30a2ef62-8e88-436b-9d79-8794b7e637fa" containerName="mariadb-database-create" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.994402 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="13830540-0ae9-4c80-b8b7-3d170f518a69" containerName="mariadb-account-create-update" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.994416 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="30a2ef62-8e88-436b-9d79-8794b7e637fa" containerName="mariadb-database-create" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.994990 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.996832 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-4m7lv" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.997204 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.997234 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:53:02 crc kubenswrapper[4702]: I1125 10:53:02.998032 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.004925 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-g947l"] Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.091451 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6nqb7\" (UniqueName: \"kubernetes.io/projected/1480bd7a-6a56-4ba2-8909-255f69b95fb7-kube-api-access-6nqb7\") pod \"keystone-db-sync-g947l\" (UID: \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\") " pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.091522 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1480bd7a-6a56-4ba2-8909-255f69b95fb7-config-data\") pod \"keystone-db-sync-g947l\" (UID: \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\") " pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.192749 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6nqb7\" (UniqueName: \"kubernetes.io/projected/1480bd7a-6a56-4ba2-8909-255f69b95fb7-kube-api-access-6nqb7\") pod \"keystone-db-sync-g947l\" (UID: \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\") " pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.192818 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1480bd7a-6a56-4ba2-8909-255f69b95fb7-config-data\") pod \"keystone-db-sync-g947l\" (UID: \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\") " pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.199832 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1480bd7a-6a56-4ba2-8909-255f69b95fb7-config-data\") pod \"keystone-db-sync-g947l\" (UID: \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\") " pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.209815 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6nqb7\" (UniqueName: \"kubernetes.io/projected/1480bd7a-6a56-4ba2-8909-255f69b95fb7-kube-api-access-6nqb7\") pod \"keystone-db-sync-g947l\" (UID: \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\") " pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.362171 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:03 crc kubenswrapper[4702]: I1125 10:53:03.691178 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-g947l"] Nov 25 10:53:04 crc kubenswrapper[4702]: I1125 10:53:04.658574 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-g947l" event={"ID":"1480bd7a-6a56-4ba2-8909-255f69b95fb7","Type":"ContainerStarted","Data":"453e84ed84e16e2db801e37829458dc8a2bf0e5e8acaaaa734dd47c3e759221a"} Nov 25 10:53:04 crc kubenswrapper[4702]: I1125 10:53:04.660918 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-g947l" event={"ID":"1480bd7a-6a56-4ba2-8909-255f69b95fb7","Type":"ContainerStarted","Data":"0257e635a2bc9219dae949f7d1a8cea53b95119e170c31efb5891e267952452a"} Nov 25 10:53:04 crc kubenswrapper[4702]: I1125 10:53:04.685895 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-g947l" podStartSLOduration=2.685867227 podStartE2EDuration="2.685867227s" podCreationTimestamp="2025-11-25 10:53:02 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:53:04.679776861 +0000 UTC m=+1282.046372730" watchObservedRunningTime="2025-11-25 10:53:04.685867227 +0000 UTC m=+1282.052462916" Nov 25 10:53:05 crc kubenswrapper[4702]: I1125 10:53:05.667512 4702 generic.go:334] "Generic (PLEG): container finished" podID="1480bd7a-6a56-4ba2-8909-255f69b95fb7" containerID="453e84ed84e16e2db801e37829458dc8a2bf0e5e8acaaaa734dd47c3e759221a" exitCode=0 Nov 25 10:53:05 crc kubenswrapper[4702]: I1125 10:53:05.668538 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-g947l" event={"ID":"1480bd7a-6a56-4ba2-8909-255f69b95fb7","Type":"ContainerDied","Data":"453e84ed84e16e2db801e37829458dc8a2bf0e5e8acaaaa734dd47c3e759221a"} Nov 25 10:53:06 crc kubenswrapper[4702]: I1125 10:53:06.917685 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.049317 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6nqb7\" (UniqueName: \"kubernetes.io/projected/1480bd7a-6a56-4ba2-8909-255f69b95fb7-kube-api-access-6nqb7\") pod \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\" (UID: \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\") " Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.049463 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1480bd7a-6a56-4ba2-8909-255f69b95fb7-config-data\") pod \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\" (UID: \"1480bd7a-6a56-4ba2-8909-255f69b95fb7\") " Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.054182 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1480bd7a-6a56-4ba2-8909-255f69b95fb7-kube-api-access-6nqb7" (OuterVolumeSpecName: "kube-api-access-6nqb7") pod "1480bd7a-6a56-4ba2-8909-255f69b95fb7" (UID: "1480bd7a-6a56-4ba2-8909-255f69b95fb7"). InnerVolumeSpecName "kube-api-access-6nqb7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.081760 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1480bd7a-6a56-4ba2-8909-255f69b95fb7-config-data" (OuterVolumeSpecName: "config-data") pod "1480bd7a-6a56-4ba2-8909-255f69b95fb7" (UID: "1480bd7a-6a56-4ba2-8909-255f69b95fb7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.150605 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6nqb7\" (UniqueName: \"kubernetes.io/projected/1480bd7a-6a56-4ba2-8909-255f69b95fb7-kube-api-access-6nqb7\") on node \"crc\" DevicePath \"\"" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.150640 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/1480bd7a-6a56-4ba2-8909-255f69b95fb7-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.685752 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-g947l" event={"ID":"1480bd7a-6a56-4ba2-8909-255f69b95fb7","Type":"ContainerDied","Data":"0257e635a2bc9219dae949f7d1a8cea53b95119e170c31efb5891e267952452a"} Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.686181 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0257e635a2bc9219dae949f7d1a8cea53b95119e170c31efb5891e267952452a" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.686102 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-g947l" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.857665 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-9vh8h"] Nov 25 10:53:07 crc kubenswrapper[4702]: E1125 10:53:07.858027 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1480bd7a-6a56-4ba2-8909-255f69b95fb7" containerName="keystone-db-sync" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.858041 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="1480bd7a-6a56-4ba2-8909-255f69b95fb7" containerName="keystone-db-sync" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.858198 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="1480bd7a-6a56-4ba2-8909-255f69b95fb7" containerName="keystone-db-sync" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.858880 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.870431 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.871788 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.872018 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-9vh8h"] Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.873141 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.873304 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-4m7lv" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.873379 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.961871 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-scripts\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.961990 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-fernet-keys\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.962014 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-credential-keys\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.962096 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-config-data\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:07 crc kubenswrapper[4702]: I1125 10:53:07.962128 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b42rj\" (UniqueName: \"kubernetes.io/projected/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-kube-api-access-b42rj\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.063431 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-config-data\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc 
kubenswrapper[4702]: I1125 10:53:08.063533 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b42rj\" (UniqueName: \"kubernetes.io/projected/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-kube-api-access-b42rj\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.063573 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-scripts\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.063662 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-fernet-keys\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.064271 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-credential-keys\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.067381 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-credential-keys\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.067557 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-fernet-keys\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.067584 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-config-data\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.068314 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-scripts\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.082606 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b42rj\" (UniqueName: \"kubernetes.io/projected/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-kube-api-access-b42rj\") pod \"keystone-bootstrap-9vh8h\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.180946 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.651573 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-9vh8h"] Nov 25 10:53:08 crc kubenswrapper[4702]: I1125 10:53:08.694588 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" event={"ID":"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f","Type":"ContainerStarted","Data":"b15a92bb235b5cac8ed29cc221e27c6b71cd52aecc259a47c23fde69ec4c85e0"} Nov 25 10:53:09 crc kubenswrapper[4702]: I1125 10:53:09.703146 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" event={"ID":"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f","Type":"ContainerStarted","Data":"97f695ecd3e4a0dcaff75ab259c3fa4134cbbf812adfe5c163b4ea8e86a41780"} Nov 25 10:53:09 crc kubenswrapper[4702]: I1125 10:53:09.726328 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" podStartSLOduration=2.726299461 podStartE2EDuration="2.726299461s" podCreationTimestamp="2025-11-25 10:53:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:53:09.718640249 +0000 UTC m=+1287.085235948" watchObservedRunningTime="2025-11-25 10:53:09.726299461 +0000 UTC m=+1287.092895150" Nov 25 10:53:11 crc kubenswrapper[4702]: I1125 10:53:11.720296 4702 generic.go:334] "Generic (PLEG): container finished" podID="5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" containerID="97f695ecd3e4a0dcaff75ab259c3fa4134cbbf812adfe5c163b4ea8e86a41780" exitCode=0 Nov 25 10:53:11 crc kubenswrapper[4702]: I1125 10:53:11.720412 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" event={"ID":"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f","Type":"ContainerDied","Data":"97f695ecd3e4a0dcaff75ab259c3fa4134cbbf812adfe5c163b4ea8e86a41780"} Nov 25 10:53:12 crc kubenswrapper[4702]: I1125 10:53:12.990745 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.148155 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-fernet-keys\") pod \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.148523 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-config-data\") pod \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.148571 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b42rj\" (UniqueName: \"kubernetes.io/projected/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-kube-api-access-b42rj\") pod \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.148604 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-credential-keys\") pod \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.148664 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-scripts\") pod \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\" (UID: \"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f\") " Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.154706 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-kube-api-access-b42rj" (OuterVolumeSpecName: "kube-api-access-b42rj") pod "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" (UID: "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f"). InnerVolumeSpecName "kube-api-access-b42rj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.154753 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" (UID: "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.156983 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-scripts" (OuterVolumeSpecName: "scripts") pod "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" (UID: "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.157537 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" (UID: "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f"). InnerVolumeSpecName "fernet-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.172114 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-config-data" (OuterVolumeSpecName: "config-data") pod "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" (UID: "5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.250101 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.250136 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.250146 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.250158 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b42rj\" (UniqueName: \"kubernetes.io/projected/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-kube-api-access-b42rj\") on node \"crc\" DevicePath \"\"" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.250169 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.747861 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" event={"ID":"5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f","Type":"ContainerDied","Data":"b15a92bb235b5cac8ed29cc221e27c6b71cd52aecc259a47c23fde69ec4c85e0"} Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.747934 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b15a92bb235b5cac8ed29cc221e27c6b71cd52aecc259a47c23fde69ec4c85e0" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.747950 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-9vh8h" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.915005 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8"] Nov 25 10:53:13 crc kubenswrapper[4702]: E1125 10:53:13.915323 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" containerName="keystone-bootstrap" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.915346 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" containerName="keystone-bootstrap" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.915517 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" containerName="keystone-bootstrap" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.916119 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.918095 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.918104 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.918530 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-4m7lv" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.919021 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:53:13 crc kubenswrapper[4702]: I1125 10:53:13.925624 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8"] Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.061570 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-credential-keys\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.061643 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-config-data\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.061945 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-fernet-keys\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.062000 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-scripts\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.062048 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6mrrn\" (UniqueName: \"kubernetes.io/projected/738a98d4-029e-4a13-b7ed-8af9beb67555-kube-api-access-6mrrn\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.163376 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-fernet-keys\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.163654 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-scripts\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.163753 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6mrrn\" (UniqueName: \"kubernetes.io/projected/738a98d4-029e-4a13-b7ed-8af9beb67555-kube-api-access-6mrrn\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.163884 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-credential-keys\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.164052 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-config-data\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.167358 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-credential-keys\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.167353 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-scripts\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.167462 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-fernet-keys\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.173084 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-config-data\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.184150 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6mrrn\" (UniqueName: \"kubernetes.io/projected/738a98d4-029e-4a13-b7ed-8af9beb67555-kube-api-access-6mrrn\") pod \"keystone-7bb6db6c57-jr9b8\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.241295 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.653606 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8"] Nov 25 10:53:14 crc kubenswrapper[4702]: I1125 10:53:14.756371 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" event={"ID":"738a98d4-029e-4a13-b7ed-8af9beb67555","Type":"ContainerStarted","Data":"1e516fa06f59b00b64cba15908479c7c4459b4228a3d69f875b8c1e54ca4bf31"} Nov 25 10:53:15 crc kubenswrapper[4702]: I1125 10:53:15.986686 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" event={"ID":"738a98d4-029e-4a13-b7ed-8af9beb67555","Type":"ContainerStarted","Data":"de768052438fa7c9e8b67f7097a5b420fa8a245528196af1b27048d9c6a3dcac"} Nov 25 10:53:15 crc kubenswrapper[4702]: I1125 10:53:15.988056 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:16 crc kubenswrapper[4702]: I1125 10:53:16.011543 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" podStartSLOduration=3.011503613 podStartE2EDuration="3.011503613s" podCreationTimestamp="2025-11-25 10:53:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:53:16.004422188 +0000 UTC m=+1293.371017877" watchObservedRunningTime="2025-11-25 10:53:16.011503613 +0000 UTC m=+1293.378099342" Nov 25 10:53:45 crc kubenswrapper[4702]: I1125 10:53:45.726804 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.048273 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf"] Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.049859 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.059505 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-84ssl"] Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.060722 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.066242 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf"] Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.074896 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-84ssl"] Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.118306 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-scripts\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.118356 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-credential-keys\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.118511 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2m8xj\" (UniqueName: \"kubernetes.io/projected/ef309bc8-cb73-4693-86f3-796b61df2f08-kube-api-access-2m8xj\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.118599 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-config-data\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.118796 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-fernet-keys\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.220811 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-fernet-keys\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.220882 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-spr28\" (UniqueName: \"kubernetes.io/projected/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-kube-api-access-spr28\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.220944 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-credential-keys\") 
pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.220977 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-scripts\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.221061 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-scripts\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.221086 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-config-data\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.221119 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-credential-keys\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.221144 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2m8xj\" (UniqueName: \"kubernetes.io/projected/ef309bc8-cb73-4693-86f3-796b61df2f08-kube-api-access-2m8xj\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.221170 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-config-data\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.221236 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-fernet-keys\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.228746 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-fernet-keys\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.229133 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-scripts\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: 
\"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.230093 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-config-data\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.231077 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-credential-keys\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.239787 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2m8xj\" (UniqueName: \"kubernetes.io/projected/ef309bc8-cb73-4693-86f3-796b61df2f08-kube-api-access-2m8xj\") pod \"keystone-7bb6db6c57-c6hgf\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.322733 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-fernet-keys\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.322780 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-spr28\" (UniqueName: \"kubernetes.io/projected/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-kube-api-access-spr28\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.322806 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-credential-keys\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.322827 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-scripts\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.322855 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-config-data\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.328520 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-scripts\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " 
pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.329186 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-config-data\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.329289 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-fernet-keys\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.329813 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-credential-keys\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.341395 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-spr28\" (UniqueName: \"kubernetes.io/projected/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-kube-api-access-spr28\") pod \"keystone-7bb6db6c57-84ssl\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.368620 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.390025 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.593735 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf"] Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.663621 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-84ssl"] Nov 25 10:53:47 crc kubenswrapper[4702]: W1125 10:53:47.675498 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda0bc1bd5_b2e3_464b_83df_0f2242b5150e.slice/crio-7ee49d014cf8c1ba0f1732078093a230943bcd0e6e599be95bcfadbdc0459a73 WatchSource:0}: Error finding container 7ee49d014cf8c1ba0f1732078093a230943bcd0e6e599be95bcfadbdc0459a73: Status 404 returned error can't find the container with id 7ee49d014cf8c1ba0f1732078093a230943bcd0e6e599be95bcfadbdc0459a73 Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.691759 4702 scope.go:117] "RemoveContainer" containerID="7af4d97cc51f1fa783f8f8f93574e232b6a510d873e307c657b4efc7abc51c53" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.729056 4702 scope.go:117] "RemoveContainer" containerID="c856ff0ed01792523c5a4ba038f14795e0026e044221a1f7a6cb45c13a0ea0dd" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.755245 4702 scope.go:117] "RemoveContainer" containerID="286892a795e8026ff2e6bb1fd2e1cd45b0eff5275fc2b779335d0adb4991e590" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.775281 4702 scope.go:117] "RemoveContainer" containerID="42c5968f115a69d31b405bea0397d7b65ed69d2ea54799aa950c09c8f3c12177" Nov 25 10:53:47 crc kubenswrapper[4702]: I1125 10:53:47.790380 4702 scope.go:117] "RemoveContainer" containerID="7c98a06037a9bae3167a3dea7d71b455ead738e8a6dd90efc2f1df0d4e117d13" Nov 25 10:53:48 crc kubenswrapper[4702]: I1125 10:53:48.200506 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" event={"ID":"ef309bc8-cb73-4693-86f3-796b61df2f08","Type":"ContainerStarted","Data":"798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578"} Nov 25 10:53:48 crc kubenswrapper[4702]: I1125 10:53:48.200560 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" event={"ID":"ef309bc8-cb73-4693-86f3-796b61df2f08","Type":"ContainerStarted","Data":"b989ac4180c2113776bbe643edb1b3f1bc994c3c9ecbf0c856ac565c0512f455"} Nov 25 10:53:48 crc kubenswrapper[4702]: I1125 10:53:48.201983 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:53:48 crc kubenswrapper[4702]: I1125 10:53:48.202975 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" event={"ID":"a0bc1bd5-b2e3-464b-83df-0f2242b5150e","Type":"ContainerStarted","Data":"7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc"} Nov 25 10:53:48 crc kubenswrapper[4702]: I1125 10:53:48.203005 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" event={"ID":"a0bc1bd5-b2e3-464b-83df-0f2242b5150e","Type":"ContainerStarted","Data":"7ee49d014cf8c1ba0f1732078093a230943bcd0e6e599be95bcfadbdc0459a73"} Nov 25 10:53:48 crc kubenswrapper[4702]: I1125 10:53:48.203302 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:53:48 crc 
kubenswrapper[4702]: I1125 10:53:48.244023 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" podStartSLOduration=1.244004932 podStartE2EDuration="1.244004932s" podCreationTimestamp="2025-11-25 10:53:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:53:48.24152503 +0000 UTC m=+1325.608120739" watchObservedRunningTime="2025-11-25 10:53:48.244004932 +0000 UTC m=+1325.610600631" Nov 25 10:53:48 crc kubenswrapper[4702]: I1125 10:53:48.244285 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" podStartSLOduration=1.24427964 podStartE2EDuration="1.24427964s" podCreationTimestamp="2025-11-25 10:53:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:53:48.221147747 +0000 UTC m=+1325.587743456" watchObservedRunningTime="2025-11-25 10:53:48.24427964 +0000 UTC m=+1325.610875329" Nov 25 10:54:18 crc kubenswrapper[4702]: I1125 10:54:18.936647 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:54:18 crc kubenswrapper[4702]: I1125 10:54:18.971613 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:54:19 crc kubenswrapper[4702]: I1125 10:54:19.915688 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf"] Nov 25 10:54:19 crc kubenswrapper[4702]: I1125 10:54:19.916031 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" podUID="ef309bc8-cb73-4693-86f3-796b61df2f08" containerName="keystone-api" containerID="cri-o://798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578" gracePeriod=30 Nov 25 10:54:19 crc kubenswrapper[4702]: I1125 10:54:19.925433 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-84ssl"] Nov 25 10:54:19 crc kubenswrapper[4702]: I1125 10:54:19.925660 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" podUID="a0bc1bd5-b2e3-464b-83df-0f2242b5150e" containerName="keystone-api" containerID="cri-o://7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc" gracePeriod=30 Nov 25 10:54:21 crc kubenswrapper[4702]: I1125 10:54:21.098468 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8"] Nov 25 10:54:21 crc kubenswrapper[4702]: I1125 10:54:21.100231 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" podUID="738a98d4-029e-4a13-b7ed-8af9beb67555" containerName="keystone-api" containerID="cri-o://de768052438fa7c9e8b67f7097a5b420fa8a245528196af1b27048d9c6a3dcac" gracePeriod=30 Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.365895 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.385793 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-fernet-keys\") pod \"ef309bc8-cb73-4693-86f3-796b61df2f08\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.385852 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-config-data\") pod \"ef309bc8-cb73-4693-86f3-796b61df2f08\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.385916 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-scripts\") pod \"ef309bc8-cb73-4693-86f3-796b61df2f08\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.385954 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2m8xj\" (UniqueName: \"kubernetes.io/projected/ef309bc8-cb73-4693-86f3-796b61df2f08-kube-api-access-2m8xj\") pod \"ef309bc8-cb73-4693-86f3-796b61df2f08\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.386002 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-credential-keys\") pod \"ef309bc8-cb73-4693-86f3-796b61df2f08\" (UID: \"ef309bc8-cb73-4693-86f3-796b61df2f08\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.394422 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef309bc8-cb73-4693-86f3-796b61df2f08-kube-api-access-2m8xj" (OuterVolumeSpecName: "kube-api-access-2m8xj") pod "ef309bc8-cb73-4693-86f3-796b61df2f08" (UID: "ef309bc8-cb73-4693-86f3-796b61df2f08"). InnerVolumeSpecName "kube-api-access-2m8xj". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.397087 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-scripts" (OuterVolumeSpecName: "scripts") pod "ef309bc8-cb73-4693-86f3-796b61df2f08" (UID: "ef309bc8-cb73-4693-86f3-796b61df2f08"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.400201 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ef309bc8-cb73-4693-86f3-796b61df2f08" (UID: "ef309bc8-cb73-4693-86f3-796b61df2f08"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.400331 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ef309bc8-cb73-4693-86f3-796b61df2f08" (UID: "ef309bc8-cb73-4693-86f3-796b61df2f08"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.407414 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.437237 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-config-data" (OuterVolumeSpecName: "config-data") pod "ef309bc8-cb73-4693-86f3-796b61df2f08" (UID: "ef309bc8-cb73-4693-86f3-796b61df2f08"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.453625 4702 generic.go:334] "Generic (PLEG): container finished" podID="ef309bc8-cb73-4693-86f3-796b61df2f08" containerID="798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578" exitCode=0 Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.453752 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.453944 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" event={"ID":"ef309bc8-cb73-4693-86f3-796b61df2f08","Type":"ContainerDied","Data":"798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578"} Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.454027 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf" event={"ID":"ef309bc8-cb73-4693-86f3-796b61df2f08","Type":"ContainerDied","Data":"b989ac4180c2113776bbe643edb1b3f1bc994c3c9ecbf0c856ac565c0512f455"} Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.454053 4702 scope.go:117] "RemoveContainer" containerID="798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.459481 4702 generic.go:334] "Generic (PLEG): container finished" podID="a0bc1bd5-b2e3-464b-83df-0f2242b5150e" containerID="7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc" exitCode=0 Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.459537 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" event={"ID":"a0bc1bd5-b2e3-464b-83df-0f2242b5150e","Type":"ContainerDied","Data":"7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc"} Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.459564 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" event={"ID":"a0bc1bd5-b2e3-464b-83df-0f2242b5150e","Type":"ContainerDied","Data":"7ee49d014cf8c1ba0f1732078093a230943bcd0e6e599be95bcfadbdc0459a73"} Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.459636 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-84ssl" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487435 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-spr28\" (UniqueName: \"kubernetes.io/projected/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-kube-api-access-spr28\") pod \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487468 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf"] Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487492 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-scripts\") pod \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487535 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-fernet-keys\") pod \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487572 4702 scope.go:117] "RemoveContainer" containerID="798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487575 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-credential-keys\") pod \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487707 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-config-data\") pod \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\" (UID: \"a0bc1bd5-b2e3-464b-83df-0f2242b5150e\") " Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487939 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487962 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487976 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.487989 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2m8xj\" (UniqueName: \"kubernetes.io/projected/ef309bc8-cb73-4693-86f3-796b61df2f08-kube-api-access-2m8xj\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.488003 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ef309bc8-cb73-4693-86f3-796b61df2f08-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: E1125 
10:54:23.488583 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578\": container with ID starting with 798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578 not found: ID does not exist" containerID="798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.488642 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578"} err="failed to get container status \"798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578\": rpc error: code = NotFound desc = could not find container \"798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578\": container with ID starting with 798846c1c14eb99be0fb379648d367e8186144e6cce141a1f85bcd8271204578 not found: ID does not exist" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.488696 4702 scope.go:117] "RemoveContainer" containerID="7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.490801 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-kube-api-access-spr28" (OuterVolumeSpecName: "kube-api-access-spr28") pod "a0bc1bd5-b2e3-464b-83df-0f2242b5150e" (UID: "a0bc1bd5-b2e3-464b-83df-0f2242b5150e"). InnerVolumeSpecName "kube-api-access-spr28". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.490849 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "a0bc1bd5-b2e3-464b-83df-0f2242b5150e" (UID: "a0bc1bd5-b2e3-464b-83df-0f2242b5150e"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.491459 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-scripts" (OuterVolumeSpecName: "scripts") pod "a0bc1bd5-b2e3-464b-83df-0f2242b5150e" (UID: "a0bc1bd5-b2e3-464b-83df-0f2242b5150e"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.492871 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "a0bc1bd5-b2e3-464b-83df-0f2242b5150e" (UID: "a0bc1bd5-b2e3-464b-83df-0f2242b5150e"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.494569 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-c6hgf"] Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.508366 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-config-data" (OuterVolumeSpecName: "config-data") pod "a0bc1bd5-b2e3-464b-83df-0f2242b5150e" (UID: "a0bc1bd5-b2e3-464b-83df-0f2242b5150e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.512509 4702 scope.go:117] "RemoveContainer" containerID="7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc" Nov 25 10:54:23 crc kubenswrapper[4702]: E1125 10:54:23.513121 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc\": container with ID starting with 7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc not found: ID does not exist" containerID="7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.513163 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc"} err="failed to get container status \"7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc\": rpc error: code = NotFound desc = could not find container \"7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc\": container with ID starting with 7b4133f5d8175036e0356f1ce3cb2c3064d9a0a42c55e88348bc03f2cbc791dc not found: ID does not exist" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.588891 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-spr28\" (UniqueName: \"kubernetes.io/projected/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-kube-api-access-spr28\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.588955 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.588968 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.588980 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.588993 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a0bc1bd5-b2e3-464b-83df-0f2242b5150e-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.792870 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-84ssl"] Nov 25 10:54:23 crc kubenswrapper[4702]: I1125 10:54:23.798116 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-84ssl"] Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.469154 4702 generic.go:334] "Generic (PLEG): container finished" podID="738a98d4-029e-4a13-b7ed-8af9beb67555" containerID="de768052438fa7c9e8b67f7097a5b420fa8a245528196af1b27048d9c6a3dcac" exitCode=0 Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.469248 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" event={"ID":"738a98d4-029e-4a13-b7ed-8af9beb67555","Type":"ContainerDied","Data":"de768052438fa7c9e8b67f7097a5b420fa8a245528196af1b27048d9c6a3dcac"} Nov 25 10:54:24 crc 
kubenswrapper[4702]: I1125 10:54:24.539216 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.703526 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-scripts\") pod \"738a98d4-029e-4a13-b7ed-8af9beb67555\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.703640 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-fernet-keys\") pod \"738a98d4-029e-4a13-b7ed-8af9beb67555\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.703675 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-credential-keys\") pod \"738a98d4-029e-4a13-b7ed-8af9beb67555\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.703707 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-config-data\") pod \"738a98d4-029e-4a13-b7ed-8af9beb67555\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.703734 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6mrrn\" (UniqueName: \"kubernetes.io/projected/738a98d4-029e-4a13-b7ed-8af9beb67555-kube-api-access-6mrrn\") pod \"738a98d4-029e-4a13-b7ed-8af9beb67555\" (UID: \"738a98d4-029e-4a13-b7ed-8af9beb67555\") " Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.707719 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-scripts" (OuterVolumeSpecName: "scripts") pod "738a98d4-029e-4a13-b7ed-8af9beb67555" (UID: "738a98d4-029e-4a13-b7ed-8af9beb67555"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.707812 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "738a98d4-029e-4a13-b7ed-8af9beb67555" (UID: "738a98d4-029e-4a13-b7ed-8af9beb67555"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.707828 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/738a98d4-029e-4a13-b7ed-8af9beb67555-kube-api-access-6mrrn" (OuterVolumeSpecName: "kube-api-access-6mrrn") pod "738a98d4-029e-4a13-b7ed-8af9beb67555" (UID: "738a98d4-029e-4a13-b7ed-8af9beb67555"). InnerVolumeSpecName "kube-api-access-6mrrn". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.707875 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "738a98d4-029e-4a13-b7ed-8af9beb67555" (UID: "738a98d4-029e-4a13-b7ed-8af9beb67555"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.720587 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-config-data" (OuterVolumeSpecName: "config-data") pod "738a98d4-029e-4a13-b7ed-8af9beb67555" (UID: "738a98d4-029e-4a13-b7ed-8af9beb67555"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.805510 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.805550 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.805559 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.805570 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6mrrn\" (UniqueName: \"kubernetes.io/projected/738a98d4-029e-4a13-b7ed-8af9beb67555-kube-api-access-6mrrn\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:24 crc kubenswrapper[4702]: I1125 10:54:24.805579 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/738a98d4-029e-4a13-b7ed-8af9beb67555-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:25 crc kubenswrapper[4702]: I1125 10:54:25.410887 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0bc1bd5-b2e3-464b-83df-0f2242b5150e" path="/var/lib/kubelet/pods/a0bc1bd5-b2e3-464b-83df-0f2242b5150e/volumes" Nov 25 10:54:25 crc kubenswrapper[4702]: I1125 10:54:25.411799 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef309bc8-cb73-4693-86f3-796b61df2f08" path="/var/lib/kubelet/pods/ef309bc8-cb73-4693-86f3-796b61df2f08/volumes" Nov 25 10:54:25 crc kubenswrapper[4702]: I1125 10:54:25.480034 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" event={"ID":"738a98d4-029e-4a13-b7ed-8af9beb67555","Type":"ContainerDied","Data":"1e516fa06f59b00b64cba15908479c7c4459b4228a3d69f875b8c1e54ca4bf31"} Nov 25 10:54:25 crc kubenswrapper[4702]: I1125 10:54:25.480065 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8" Nov 25 10:54:25 crc kubenswrapper[4702]: I1125 10:54:25.480121 4702 scope.go:117] "RemoveContainer" containerID="de768052438fa7c9e8b67f7097a5b420fa8a245528196af1b27048d9c6a3dcac" Nov 25 10:54:25 crc kubenswrapper[4702]: I1125 10:54:25.505504 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8"] Nov 25 10:54:25 crc kubenswrapper[4702]: I1125 10:54:25.509609 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-7bb6db6c57-jr9b8"] Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.557093 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-g947l"] Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.563566 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-g947l"] Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.571268 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-9vh8h"] Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.577658 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-9vh8h"] Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.625373 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs"] Nov 25 10:54:26 crc kubenswrapper[4702]: E1125 10:54:26.625776 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="738a98d4-029e-4a13-b7ed-8af9beb67555" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.625801 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="738a98d4-029e-4a13-b7ed-8af9beb67555" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: E1125 10:54:26.625821 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ef309bc8-cb73-4693-86f3-796b61df2f08" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.625832 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef309bc8-cb73-4693-86f3-796b61df2f08" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: E1125 10:54:26.625852 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a0bc1bd5-b2e3-464b-83df-0f2242b5150e" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.625861 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a0bc1bd5-b2e3-464b-83df-0f2242b5150e" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.626095 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="738a98d4-029e-4a13-b7ed-8af9beb67555" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.626121 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a0bc1bd5-b2e3-464b-83df-0f2242b5150e" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.626135 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef309bc8-cb73-4693-86f3-796b61df2f08" containerName="keystone-api" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.626888 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.627972 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/689c2b17-7a77-4e7f-a110-07cdf705c250-operator-scripts\") pod \"keystone0d7e-account-delete-mfnvs\" (UID: \"689c2b17-7a77-4e7f-a110-07cdf705c250\") " pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.628014 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fxpt2\" (UniqueName: \"kubernetes.io/projected/689c2b17-7a77-4e7f-a110-07cdf705c250-kube-api-access-fxpt2\") pod \"keystone0d7e-account-delete-mfnvs\" (UID: \"689c2b17-7a77-4e7f-a110-07cdf705c250\") " pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.637111 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs"] Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.729729 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/689c2b17-7a77-4e7f-a110-07cdf705c250-operator-scripts\") pod \"keystone0d7e-account-delete-mfnvs\" (UID: \"689c2b17-7a77-4e7f-a110-07cdf705c250\") " pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.729775 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fxpt2\" (UniqueName: \"kubernetes.io/projected/689c2b17-7a77-4e7f-a110-07cdf705c250-kube-api-access-fxpt2\") pod \"keystone0d7e-account-delete-mfnvs\" (UID: \"689c2b17-7a77-4e7f-a110-07cdf705c250\") " pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.730945 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/689c2b17-7a77-4e7f-a110-07cdf705c250-operator-scripts\") pod \"keystone0d7e-account-delete-mfnvs\" (UID: \"689c2b17-7a77-4e7f-a110-07cdf705c250\") " pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.749179 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fxpt2\" (UniqueName: \"kubernetes.io/projected/689c2b17-7a77-4e7f-a110-07cdf705c250-kube-api-access-fxpt2\") pod \"keystone0d7e-account-delete-mfnvs\" (UID: \"689c2b17-7a77-4e7f-a110-07cdf705c250\") " pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:26 crc kubenswrapper[4702]: I1125 10:54:26.948224 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:27 crc kubenswrapper[4702]: I1125 10:54:27.145330 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs"] Nov 25 10:54:27 crc kubenswrapper[4702]: I1125 10:54:27.411297 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1480bd7a-6a56-4ba2-8909-255f69b95fb7" path="/var/lib/kubelet/pods/1480bd7a-6a56-4ba2-8909-255f69b95fb7/volumes" Nov 25 10:54:27 crc kubenswrapper[4702]: I1125 10:54:27.412154 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f" path="/var/lib/kubelet/pods/5ac3fdf4-ce56-4658-8d84-e2a4b96fcc2f/volumes" Nov 25 10:54:27 crc kubenswrapper[4702]: I1125 10:54:27.412657 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="738a98d4-029e-4a13-b7ed-8af9beb67555" path="/var/lib/kubelet/pods/738a98d4-029e-4a13-b7ed-8af9beb67555/volumes" Nov 25 10:54:27 crc kubenswrapper[4702]: I1125 10:54:27.496532 4702 generic.go:334] "Generic (PLEG): container finished" podID="689c2b17-7a77-4e7f-a110-07cdf705c250" containerID="981b1fcfa0dbe2677cbb696df2870e06c2a008295f64b21996113f85b389d3db" exitCode=0 Nov 25 10:54:27 crc kubenswrapper[4702]: I1125 10:54:27.496650 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" event={"ID":"689c2b17-7a77-4e7f-a110-07cdf705c250","Type":"ContainerDied","Data":"981b1fcfa0dbe2677cbb696df2870e06c2a008295f64b21996113f85b389d3db"} Nov 25 10:54:27 crc kubenswrapper[4702]: I1125 10:54:27.496688 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" event={"ID":"689c2b17-7a77-4e7f-a110-07cdf705c250","Type":"ContainerStarted","Data":"63b2b201b10a24c50683b59ca5f26ae8cb28381de6f47fc84982d97cd28fc104"} Nov 25 10:54:28 crc kubenswrapper[4702]: I1125 10:54:28.727928 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:28 crc kubenswrapper[4702]: I1125 10:54:28.862624 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/689c2b17-7a77-4e7f-a110-07cdf705c250-operator-scripts\") pod \"689c2b17-7a77-4e7f-a110-07cdf705c250\" (UID: \"689c2b17-7a77-4e7f-a110-07cdf705c250\") " Nov 25 10:54:28 crc kubenswrapper[4702]: I1125 10:54:28.862701 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fxpt2\" (UniqueName: \"kubernetes.io/projected/689c2b17-7a77-4e7f-a110-07cdf705c250-kube-api-access-fxpt2\") pod \"689c2b17-7a77-4e7f-a110-07cdf705c250\" (UID: \"689c2b17-7a77-4e7f-a110-07cdf705c250\") " Nov 25 10:54:28 crc kubenswrapper[4702]: I1125 10:54:28.863492 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/689c2b17-7a77-4e7f-a110-07cdf705c250-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "689c2b17-7a77-4e7f-a110-07cdf705c250" (UID: "689c2b17-7a77-4e7f-a110-07cdf705c250"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:54:28 crc kubenswrapper[4702]: I1125 10:54:28.868068 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/689c2b17-7a77-4e7f-a110-07cdf705c250-kube-api-access-fxpt2" (OuterVolumeSpecName: "kube-api-access-fxpt2") pod "689c2b17-7a77-4e7f-a110-07cdf705c250" (UID: "689c2b17-7a77-4e7f-a110-07cdf705c250"). InnerVolumeSpecName "kube-api-access-fxpt2". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:54:28 crc kubenswrapper[4702]: I1125 10:54:28.964143 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/689c2b17-7a77-4e7f-a110-07cdf705c250-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:28 crc kubenswrapper[4702]: I1125 10:54:28.964203 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fxpt2\" (UniqueName: \"kubernetes.io/projected/689c2b17-7a77-4e7f-a110-07cdf705c250-kube-api-access-fxpt2\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:29 crc kubenswrapper[4702]: I1125 10:54:29.510822 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" event={"ID":"689c2b17-7a77-4e7f-a110-07cdf705c250","Type":"ContainerDied","Data":"63b2b201b10a24c50683b59ca5f26ae8cb28381de6f47fc84982d97cd28fc104"} Nov 25 10:54:29 crc kubenswrapper[4702]: I1125 10:54:29.510869 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="63b2b201b10a24c50683b59ca5f26ae8cb28381de6f47fc84982d97cd28fc104" Nov 25 10:54:29 crc kubenswrapper[4702]: I1125 10:54:29.510887 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs" Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.651293 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-jwtgn"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.657004 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-jwtgn"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.677828 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.683111 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.688192 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone0d7e-account-delete-mfnvs"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.695129 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-0d7e-account-create-update-xfmrj"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.743445 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-zhk4w"] Nov 25 10:54:31 crc kubenswrapper[4702]: E1125 10:54:31.743713 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="689c2b17-7a77-4e7f-a110-07cdf705c250" containerName="mariadb-account-delete" Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.743729 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="689c2b17-7a77-4e7f-a110-07cdf705c250" containerName="mariadb-account-delete" Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.743836 4702 
memory_manager.go:354] "RemoveStaleState removing state" podUID="689c2b17-7a77-4e7f-a110-07cdf705c250" containerName="mariadb-account-delete" Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.745194 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.750580 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-zhk4w"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.850138 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.851053 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.853435 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.861154 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d"] Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.904081 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f097a1-ec7a-483b-92b2-dd158a54c662-operator-scripts\") pod \"keystone-db-create-zhk4w\" (UID: \"58f097a1-ec7a-483b-92b2-dd158a54c662\") " pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:31 crc kubenswrapper[4702]: I1125 10:54:31.904402 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6f5hs\" (UniqueName: \"kubernetes.io/projected/58f097a1-ec7a-483b-92b2-dd158a54c662-kube-api-access-6f5hs\") pod \"keystone-db-create-zhk4w\" (UID: \"58f097a1-ec7a-483b-92b2-dd158a54c662\") " pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.005592 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6f5hs\" (UniqueName: \"kubernetes.io/projected/58f097a1-ec7a-483b-92b2-dd158a54c662-kube-api-access-6f5hs\") pod \"keystone-db-create-zhk4w\" (UID: \"58f097a1-ec7a-483b-92b2-dd158a54c662\") " pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.005695 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87226457-4fbe-4677-bff8-edf8e8619c9b-operator-scripts\") pod \"keystone-99e9-account-create-update-6lp7d\" (UID: \"87226457-4fbe-4677-bff8-edf8e8619c9b\") " pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.005734 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rzlrl\" (UniqueName: \"kubernetes.io/projected/87226457-4fbe-4677-bff8-edf8e8619c9b-kube-api-access-rzlrl\") pod \"keystone-99e9-account-create-update-6lp7d\" (UID: \"87226457-4fbe-4677-bff8-edf8e8619c9b\") " pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.005768 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f097a1-ec7a-483b-92b2-dd158a54c662-operator-scripts\") pod \"keystone-db-create-zhk4w\" (UID: \"58f097a1-ec7a-483b-92b2-dd158a54c662\") " pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.006473 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f097a1-ec7a-483b-92b2-dd158a54c662-operator-scripts\") pod \"keystone-db-create-zhk4w\" (UID: \"58f097a1-ec7a-483b-92b2-dd158a54c662\") " pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.026685 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6f5hs\" (UniqueName: \"kubernetes.io/projected/58f097a1-ec7a-483b-92b2-dd158a54c662-kube-api-access-6f5hs\") pod \"keystone-db-create-zhk4w\" (UID: \"58f097a1-ec7a-483b-92b2-dd158a54c662\") " pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.061802 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.106513 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87226457-4fbe-4677-bff8-edf8e8619c9b-operator-scripts\") pod \"keystone-99e9-account-create-update-6lp7d\" (UID: \"87226457-4fbe-4677-bff8-edf8e8619c9b\") " pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.106576 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rzlrl\" (UniqueName: \"kubernetes.io/projected/87226457-4fbe-4677-bff8-edf8e8619c9b-kube-api-access-rzlrl\") pod \"keystone-99e9-account-create-update-6lp7d\" (UID: \"87226457-4fbe-4677-bff8-edf8e8619c9b\") " pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.107340 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87226457-4fbe-4677-bff8-edf8e8619c9b-operator-scripts\") pod \"keystone-99e9-account-create-update-6lp7d\" (UID: \"87226457-4fbe-4677-bff8-edf8e8619c9b\") " pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.126554 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rzlrl\" (UniqueName: \"kubernetes.io/projected/87226457-4fbe-4677-bff8-edf8e8619c9b-kube-api-access-rzlrl\") pod \"keystone-99e9-account-create-update-6lp7d\" (UID: \"87226457-4fbe-4677-bff8-edf8e8619c9b\") " pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.165842 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.489437 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-zhk4w"] Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.536236 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-zhk4w" event={"ID":"58f097a1-ec7a-483b-92b2-dd158a54c662","Type":"ContainerStarted","Data":"b9fd83e2c73705fde78f5d43473f75f25977c0fd2e928d3bebecac58a2df3e36"} Nov 25 10:54:32 crc kubenswrapper[4702]: I1125 10:54:32.587112 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d"] Nov 25 10:54:32 crc kubenswrapper[4702]: W1125 10:54:32.591825 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod87226457_4fbe_4677_bff8_edf8e8619c9b.slice/crio-ead10c53593fe4195b5a3661490f7a00298175ba9f95a0a94b5849f5906d4797 WatchSource:0}: Error finding container ead10c53593fe4195b5a3661490f7a00298175ba9f95a0a94b5849f5906d4797: Status 404 returned error can't find the container with id ead10c53593fe4195b5a3661490f7a00298175ba9f95a0a94b5849f5906d4797 Nov 25 10:54:33 crc kubenswrapper[4702]: I1125 10:54:33.416080 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="13830540-0ae9-4c80-b8b7-3d170f518a69" path="/var/lib/kubelet/pods/13830540-0ae9-4c80-b8b7-3d170f518a69/volumes" Nov 25 10:54:33 crc kubenswrapper[4702]: I1125 10:54:33.417447 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30a2ef62-8e88-436b-9d79-8794b7e637fa" path="/var/lib/kubelet/pods/30a2ef62-8e88-436b-9d79-8794b7e637fa/volumes" Nov 25 10:54:33 crc kubenswrapper[4702]: I1125 10:54:33.417956 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="689c2b17-7a77-4e7f-a110-07cdf705c250" path="/var/lib/kubelet/pods/689c2b17-7a77-4e7f-a110-07cdf705c250/volumes" Nov 25 10:54:33 crc kubenswrapper[4702]: I1125 10:54:33.547711 4702 generic.go:334] "Generic (PLEG): container finished" podID="87226457-4fbe-4677-bff8-edf8e8619c9b" containerID="c2ff9776f4536b8d26994d2aef594f34dc183e1e01b29f8ada227b0bbc3684ab" exitCode=0 Nov 25 10:54:33 crc kubenswrapper[4702]: I1125 10:54:33.547770 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" event={"ID":"87226457-4fbe-4677-bff8-edf8e8619c9b","Type":"ContainerDied","Data":"c2ff9776f4536b8d26994d2aef594f34dc183e1e01b29f8ada227b0bbc3684ab"} Nov 25 10:54:33 crc kubenswrapper[4702]: I1125 10:54:33.547817 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" event={"ID":"87226457-4fbe-4677-bff8-edf8e8619c9b","Type":"ContainerStarted","Data":"ead10c53593fe4195b5a3661490f7a00298175ba9f95a0a94b5849f5906d4797"} Nov 25 10:54:33 crc kubenswrapper[4702]: I1125 10:54:33.549300 4702 generic.go:334] "Generic (PLEG): container finished" podID="58f097a1-ec7a-483b-92b2-dd158a54c662" containerID="91647b2ee01839662d1ba608c4d54aeda9e941c35df0e7dd8a14a1ee6e76292d" exitCode=0 Nov 25 10:54:33 crc kubenswrapper[4702]: I1125 10:54:33.549352 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-zhk4w" 
event={"ID":"58f097a1-ec7a-483b-92b2-dd158a54c662","Type":"ContainerDied","Data":"91647b2ee01839662d1ba608c4d54aeda9e941c35df0e7dd8a14a1ee6e76292d"} Nov 25 10:54:34 crc kubenswrapper[4702]: I1125 10:54:34.863141 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:34 crc kubenswrapper[4702]: I1125 10:54:34.872670 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.052188 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87226457-4fbe-4677-bff8-edf8e8619c9b-operator-scripts\") pod \"87226457-4fbe-4677-bff8-edf8e8619c9b\" (UID: \"87226457-4fbe-4677-bff8-edf8e8619c9b\") " Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.052292 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rzlrl\" (UniqueName: \"kubernetes.io/projected/87226457-4fbe-4677-bff8-edf8e8619c9b-kube-api-access-rzlrl\") pod \"87226457-4fbe-4677-bff8-edf8e8619c9b\" (UID: \"87226457-4fbe-4677-bff8-edf8e8619c9b\") " Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.052386 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6f5hs\" (UniqueName: \"kubernetes.io/projected/58f097a1-ec7a-483b-92b2-dd158a54c662-kube-api-access-6f5hs\") pod \"58f097a1-ec7a-483b-92b2-dd158a54c662\" (UID: \"58f097a1-ec7a-483b-92b2-dd158a54c662\") " Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.052426 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f097a1-ec7a-483b-92b2-dd158a54c662-operator-scripts\") pod \"58f097a1-ec7a-483b-92b2-dd158a54c662\" (UID: \"58f097a1-ec7a-483b-92b2-dd158a54c662\") " Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.053140 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/58f097a1-ec7a-483b-92b2-dd158a54c662-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "58f097a1-ec7a-483b-92b2-dd158a54c662" (UID: "58f097a1-ec7a-483b-92b2-dd158a54c662"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.053165 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87226457-4fbe-4677-bff8-edf8e8619c9b-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "87226457-4fbe-4677-bff8-edf8e8619c9b" (UID: "87226457-4fbe-4677-bff8-edf8e8619c9b"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.058306 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/58f097a1-ec7a-483b-92b2-dd158a54c662-kube-api-access-6f5hs" (OuterVolumeSpecName: "kube-api-access-6f5hs") pod "58f097a1-ec7a-483b-92b2-dd158a54c662" (UID: "58f097a1-ec7a-483b-92b2-dd158a54c662"). InnerVolumeSpecName "kube-api-access-6f5hs". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.059120 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87226457-4fbe-4677-bff8-edf8e8619c9b-kube-api-access-rzlrl" (OuterVolumeSpecName: "kube-api-access-rzlrl") pod "87226457-4fbe-4677-bff8-edf8e8619c9b" (UID: "87226457-4fbe-4677-bff8-edf8e8619c9b"). InnerVolumeSpecName "kube-api-access-rzlrl". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.153402 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6f5hs\" (UniqueName: \"kubernetes.io/projected/58f097a1-ec7a-483b-92b2-dd158a54c662-kube-api-access-6f5hs\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.153442 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/58f097a1-ec7a-483b-92b2-dd158a54c662-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.153454 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/87226457-4fbe-4677-bff8-edf8e8619c9b-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.153464 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rzlrl\" (UniqueName: \"kubernetes.io/projected/87226457-4fbe-4677-bff8-edf8e8619c9b-kube-api-access-rzlrl\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.572652 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" event={"ID":"87226457-4fbe-4677-bff8-edf8e8619c9b","Type":"ContainerDied","Data":"ead10c53593fe4195b5a3661490f7a00298175ba9f95a0a94b5849f5906d4797"} Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.573436 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ead10c53593fe4195b5a3661490f7a00298175ba9f95a0a94b5849f5906d4797" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.572672 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.574405 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-zhk4w" event={"ID":"58f097a1-ec7a-483b-92b2-dd158a54c662","Type":"ContainerDied","Data":"b9fd83e2c73705fde78f5d43473f75f25977c0fd2e928d3bebecac58a2df3e36"} Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.574437 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b9fd83e2c73705fde78f5d43473f75f25977c0fd2e928d3bebecac58a2df3e36" Nov 25 10:54:35 crc kubenswrapper[4702]: I1125 10:54:35.574495 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-zhk4w" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.410707 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-bhrfr"] Nov 25 10:54:37 crc kubenswrapper[4702]: E1125 10:54:37.410985 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="58f097a1-ec7a-483b-92b2-dd158a54c662" containerName="mariadb-database-create" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.411003 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="58f097a1-ec7a-483b-92b2-dd158a54c662" containerName="mariadb-database-create" Nov 25 10:54:37 crc kubenswrapper[4702]: E1125 10:54:37.411032 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="87226457-4fbe-4677-bff8-edf8e8619c9b" containerName="mariadb-account-create-update" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.411041 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="87226457-4fbe-4677-bff8-edf8e8619c9b" containerName="mariadb-account-create-update" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.411186 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="87226457-4fbe-4677-bff8-edf8e8619c9b" containerName="mariadb-account-create-update" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.411203 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="58f097a1-ec7a-483b-92b2-dd158a54c662" containerName="mariadb-database-create" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.411738 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.418751 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-tc5ss" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.419970 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.419976 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"combined-ca-bundle" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.420021 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.420376 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.422163 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-bhrfr"] Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.582655 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-config-data\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.582741 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-combined-ca-bundle\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" 
Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.583128 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pltw9\" (UniqueName: \"kubernetes.io/projected/e51c3d93-e832-4a5a-bf13-b512a6585742-kube-api-access-pltw9\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.684671 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pltw9\" (UniqueName: \"kubernetes.io/projected/e51c3d93-e832-4a5a-bf13-b512a6585742-kube-api-access-pltw9\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.685041 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-config-data\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.685078 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-combined-ca-bundle\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.691347 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-config-data\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.691884 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-combined-ca-bundle\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.704973 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pltw9\" (UniqueName: \"kubernetes.io/projected/e51c3d93-e832-4a5a-bf13-b512a6585742-kube-api-access-pltw9\") pod \"keystone-db-sync-bhrfr\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:37 crc kubenswrapper[4702]: I1125 10:54:37.726616 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:38 crc kubenswrapper[4702]: I1125 10:54:38.132918 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-bhrfr"] Nov 25 10:54:38 crc kubenswrapper[4702]: I1125 10:54:38.603821 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" event={"ID":"e51c3d93-e832-4a5a-bf13-b512a6585742","Type":"ContainerStarted","Data":"a220a5e2311e81b030c74226a63a7ea4718764b1076c55f00ef41a425ac43e34"} Nov 25 10:54:38 crc kubenswrapper[4702]: I1125 10:54:38.603884 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" event={"ID":"e51c3d93-e832-4a5a-bf13-b512a6585742","Type":"ContainerStarted","Data":"fa590eb4d4800e6a54217a89f8a1c3b16fedce149ea9e7048450f1de66d5f6c8"} Nov 25 10:54:38 crc kubenswrapper[4702]: I1125 10:54:38.637248 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" podStartSLOduration=1.637226286 podStartE2EDuration="1.637226286s" podCreationTimestamp="2025-11-25 10:54:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:54:38.637083242 +0000 UTC m=+1376.003678961" watchObservedRunningTime="2025-11-25 10:54:38.637226286 +0000 UTC m=+1376.003821975" Nov 25 10:54:40 crc kubenswrapper[4702]: I1125 10:54:40.621513 4702 generic.go:334] "Generic (PLEG): container finished" podID="e51c3d93-e832-4a5a-bf13-b512a6585742" containerID="a220a5e2311e81b030c74226a63a7ea4718764b1076c55f00ef41a425ac43e34" exitCode=0 Nov 25 10:54:40 crc kubenswrapper[4702]: I1125 10:54:40.621709 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" event={"ID":"e51c3d93-e832-4a5a-bf13-b512a6585742","Type":"ContainerDied","Data":"a220a5e2311e81b030c74226a63a7ea4718764b1076c55f00ef41a425ac43e34"} Nov 25 10:54:41 crc kubenswrapper[4702]: I1125 10:54:41.871415 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.049572 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pltw9\" (UniqueName: \"kubernetes.io/projected/e51c3d93-e832-4a5a-bf13-b512a6585742-kube-api-access-pltw9\") pod \"e51c3d93-e832-4a5a-bf13-b512a6585742\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.049773 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-config-data\") pod \"e51c3d93-e832-4a5a-bf13-b512a6585742\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.049814 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-combined-ca-bundle\") pod \"e51c3d93-e832-4a5a-bf13-b512a6585742\" (UID: \"e51c3d93-e832-4a5a-bf13-b512a6585742\") " Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.056830 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e51c3d93-e832-4a5a-bf13-b512a6585742-kube-api-access-pltw9" (OuterVolumeSpecName: "kube-api-access-pltw9") pod "e51c3d93-e832-4a5a-bf13-b512a6585742" (UID: "e51c3d93-e832-4a5a-bf13-b512a6585742"). InnerVolumeSpecName "kube-api-access-pltw9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.072342 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "e51c3d93-e832-4a5a-bf13-b512a6585742" (UID: "e51c3d93-e832-4a5a-bf13-b512a6585742"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.085153 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-config-data" (OuterVolumeSpecName: "config-data") pod "e51c3d93-e832-4a5a-bf13-b512a6585742" (UID: "e51c3d93-e832-4a5a-bf13-b512a6585742"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.151512 4702 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.151557 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pltw9\" (UniqueName: \"kubernetes.io/projected/e51c3d93-e832-4a5a-bf13-b512a6585742-kube-api-access-pltw9\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.151573 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e51c3d93-e832-4a5a-bf13-b512a6585742-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.637208 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" event={"ID":"e51c3d93-e832-4a5a-bf13-b512a6585742","Type":"ContainerDied","Data":"fa590eb4d4800e6a54217a89f8a1c3b16fedce149ea9e7048450f1de66d5f6c8"} Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.637257 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fa590eb4d4800e6a54217a89f8a1c3b16fedce149ea9e7048450f1de66d5f6c8" Nov 25 10:54:42 crc kubenswrapper[4702]: I1125 10:54:42.637277 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-bhrfr" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.056083 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-xj449"] Nov 25 10:54:43 crc kubenswrapper[4702]: E1125 10:54:43.056375 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e51c3d93-e832-4a5a-bf13-b512a6585742" containerName="keystone-db-sync" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.056390 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e51c3d93-e832-4a5a-bf13-b512a6585742" containerName="keystone-db-sync" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.056577 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="e51c3d93-e832-4a5a-bf13-b512a6585742" containerName="keystone-db-sync" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.057172 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.063113 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.063797 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"combined-ca-bundle" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.063856 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.063864 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.063986 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-tc5ss" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.064129 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.069063 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-xj449"] Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.165945 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-credential-keys\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.166157 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-scripts\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.166252 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-fernet-keys\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.166347 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-combined-ca-bundle\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.166802 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-config-data\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.166893 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kt2zg\" (UniqueName: 
\"kubernetes.io/projected/ffb83fd7-f533-4feb-bf57-d139d8d1b550-kube-api-access-kt2zg\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.268643 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-credential-keys\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.268726 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-scripts\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.268776 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-fernet-keys\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.268798 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-combined-ca-bundle\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.268825 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-config-data\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.268848 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kt2zg\" (UniqueName: \"kubernetes.io/projected/ffb83fd7-f533-4feb-bf57-d139d8d1b550-kube-api-access-kt2zg\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.275104 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-credential-keys\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.275299 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-fernet-keys\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.275710 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-scripts\") pod 
\"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.276252 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-combined-ca-bundle\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.276255 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-config-data\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.286766 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kt2zg\" (UniqueName: \"kubernetes.io/projected/ffb83fd7-f533-4feb-bf57-d139d8d1b550-kube-api-access-kt2zg\") pod \"keystone-bootstrap-xj449\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.381031 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-tc5ss" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.388503 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.591524 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.591849 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.794589 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-xj449"] Nov 25 10:54:43 crc kubenswrapper[4702]: I1125 10:54:43.807007 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Nov 25 10:54:44 crc kubenswrapper[4702]: I1125 10:54:44.653785 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-xj449" event={"ID":"ffb83fd7-f533-4feb-bf57-d139d8d1b550","Type":"ContainerStarted","Data":"bd0dca3dea27484db4e8258da18778e24d1580208fcb39d5bee2c54067187158"} Nov 25 10:54:44 crc kubenswrapper[4702]: I1125 10:54:44.653853 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-xj449" event={"ID":"ffb83fd7-f533-4feb-bf57-d139d8d1b550","Type":"ContainerStarted","Data":"660f4b2278a160c16b63313fc4f4462c939ff45bc61a44818d57fd6ce9b77f3e"} Nov 25 10:54:44 crc kubenswrapper[4702]: I1125 10:54:44.679671 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="keystone-kuttl-tests/keystone-bootstrap-xj449" podStartSLOduration=1.679649269 podStartE2EDuration="1.679649269s" podCreationTimestamp="2025-11-25 10:54:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:54:44.678182596 +0000 UTC m=+1382.044778285" watchObservedRunningTime="2025-11-25 10:54:44.679649269 +0000 UTC m=+1382.046244958" Nov 25 10:54:46 crc kubenswrapper[4702]: I1125 10:54:46.669544 4702 generic.go:334] "Generic (PLEG): container finished" podID="ffb83fd7-f533-4feb-bf57-d139d8d1b550" containerID="bd0dca3dea27484db4e8258da18778e24d1580208fcb39d5bee2c54067187158" exitCode=0 Nov 25 10:54:46 crc kubenswrapper[4702]: I1125 10:54:46.669644 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-xj449" event={"ID":"ffb83fd7-f533-4feb-bf57-d139d8d1b550","Type":"ContainerDied","Data":"bd0dca3dea27484db4e8258da18778e24d1580208fcb39d5bee2c54067187158"} Nov 25 10:54:47 crc kubenswrapper[4702]: I1125 10:54:47.854008 4702 scope.go:117] "RemoveContainer" containerID="d53a99b17509fb3e48c6519c05d8fe60358a92d7ac6f58643b9ac68c0113b550" Nov 25 10:54:47 crc kubenswrapper[4702]: I1125 10:54:47.957365 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.057431 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-credential-keys\") pod \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.057556 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-combined-ca-bundle\") pod \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.057620 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kt2zg\" (UniqueName: \"kubernetes.io/projected/ffb83fd7-f533-4feb-bf57-d139d8d1b550-kube-api-access-kt2zg\") pod \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.057673 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-fernet-keys\") pod \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.057700 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-config-data\") pod \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.057811 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-scripts\") pod \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\" (UID: \"ffb83fd7-f533-4feb-bf57-d139d8d1b550\") " Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 
10:54:48.064377 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-scripts" (OuterVolumeSpecName: "scripts") pod "ffb83fd7-f533-4feb-bf57-d139d8d1b550" (UID: "ffb83fd7-f533-4feb-bf57-d139d8d1b550"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.064645 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ffb83fd7-f533-4feb-bf57-d139d8d1b550" (UID: "ffb83fd7-f533-4feb-bf57-d139d8d1b550"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.064655 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ffb83fd7-f533-4feb-bf57-d139d8d1b550-kube-api-access-kt2zg" (OuterVolumeSpecName: "kube-api-access-kt2zg") pod "ffb83fd7-f533-4feb-bf57-d139d8d1b550" (UID: "ffb83fd7-f533-4feb-bf57-d139d8d1b550"). InnerVolumeSpecName "kube-api-access-kt2zg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.064747 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ffb83fd7-f533-4feb-bf57-d139d8d1b550" (UID: "ffb83fd7-f533-4feb-bf57-d139d8d1b550"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.084760 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "ffb83fd7-f533-4feb-bf57-d139d8d1b550" (UID: "ffb83fd7-f533-4feb-bf57-d139d8d1b550"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.085047 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-config-data" (OuterVolumeSpecName: "config-data") pod "ffb83fd7-f533-4feb-bf57-d139d8d1b550" (UID: "ffb83fd7-f533-4feb-bf57-d139d8d1b550"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.160072 4702 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.160121 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kt2zg\" (UniqueName: \"kubernetes.io/projected/ffb83fd7-f533-4feb-bf57-d139d8d1b550-kube-api-access-kt2zg\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.160145 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.160156 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.160166 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.160176 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ffb83fd7-f533-4feb-bf57-d139d8d1b550-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.685117 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-xj449" event={"ID":"ffb83fd7-f533-4feb-bf57-d139d8d1b550","Type":"ContainerDied","Data":"660f4b2278a160c16b63313fc4f4462c939ff45bc61a44818d57fd6ce9b77f3e"} Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.685161 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="660f4b2278a160c16b63313fc4f4462c939ff45bc61a44818d57fd6ce9b77f3e" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.685168 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-xj449" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.770096 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8"] Nov 25 10:54:48 crc kubenswrapper[4702]: E1125 10:54:48.770433 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ffb83fd7-f533-4feb-bf57-d139d8d1b550" containerName="keystone-bootstrap" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.770460 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="ffb83fd7-f533-4feb-bf57-d139d8d1b550" containerName="keystone-bootstrap" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.770625 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="ffb83fd7-f533-4feb-bf57-d139d8d1b550" containerName="keystone-bootstrap" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.771666 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.774617 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.777233 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8"] Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.777389 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"combined-ca-bundle" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.777546 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"cert-keystone-internal-svc" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.777665 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"cert-keystone-public-svc" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.782329 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.782533 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-tc5ss" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.782685 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.868422 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-combined-ca-bundle\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.868625 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-fernet-keys\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.868798 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-public-tls-certs\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.868842 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-krbps\" (UniqueName: \"kubernetes.io/projected/bced984a-f136-4ae9-86a7-179e0a94530b-kube-api-access-krbps\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.868883 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-internal-tls-certs\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " 
pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.868981 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-config-data\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.869094 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-scripts\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.869140 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-credential-keys\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.970919 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-public-tls-certs\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.970985 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-krbps\" (UniqueName: \"kubernetes.io/projected/bced984a-f136-4ae9-86a7-179e0a94530b-kube-api-access-krbps\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.971018 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-internal-tls-certs\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.971052 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-config-data\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.972193 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-scripts\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.972277 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-credential-keys\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: 
\"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.972368 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-combined-ca-bundle\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.972428 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-fernet-keys\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.975175 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-internal-tls-certs\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.975303 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-public-tls-certs\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.975993 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-combined-ca-bundle\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.976223 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-credential-keys\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.976285 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-config-data\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.976291 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-scripts\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.977511 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-fernet-keys\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " 
pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:48 crc kubenswrapper[4702]: I1125 10:54:48.990379 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-krbps\" (UniqueName: \"kubernetes.io/projected/bced984a-f136-4ae9-86a7-179e0a94530b-kube-api-access-krbps\") pod \"keystone-5fb9c4d8d8-9wwc8\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:49 crc kubenswrapper[4702]: I1125 10:54:49.087331 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:49 crc kubenswrapper[4702]: I1125 10:54:49.293217 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8"] Nov 25 10:54:49 crc kubenswrapper[4702]: I1125 10:54:49.694417 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" event={"ID":"bced984a-f136-4ae9-86a7-179e0a94530b","Type":"ContainerStarted","Data":"e429f00ef0a1a3fc9ef91690f2203dbb399b5a15878e658de753c8426d2f8ba7"} Nov 25 10:54:49 crc kubenswrapper[4702]: I1125 10:54:49.694786 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" event={"ID":"bced984a-f136-4ae9-86a7-179e0a94530b","Type":"ContainerStarted","Data":"bb54780a1eb0decdfb0cf53766315715b5a144abf1cdcbb02d6d7af055fc874a"} Nov 25 10:54:49 crc kubenswrapper[4702]: I1125 10:54:49.695037 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:54:49 crc kubenswrapper[4702]: I1125 10:54:49.714159 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" podStartSLOduration=1.7141376240000001 podStartE2EDuration="1.714137624s" podCreationTimestamp="2025-11-25 10:54:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:54:49.709362466 +0000 UTC m=+1387.075958155" watchObservedRunningTime="2025-11-25 10:54:49.714137624 +0000 UTC m=+1387.080733313" Nov 25 10:55:13 crc kubenswrapper[4702]: I1125 10:55:13.590701 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:55:13 crc kubenswrapper[4702]: I1125 10:55:13.591344 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:55:20 crc kubenswrapper[4702]: I1125 10:55:20.540174 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.038202 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-bhrfr"] Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.047689 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-bhrfr"] Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 
10:55:22.054576 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-xj449"] Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.059683 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-xj449"] Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.064165 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8"] Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.064371 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" podUID="bced984a-f136-4ae9-86a7-179e0a94530b" containerName="keystone-api" containerID="cri-o://e429f00ef0a1a3fc9ef91690f2203dbb399b5a15878e658de753c8426d2f8ba7" gracePeriod=30 Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.118562 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone99e9-account-delete-v9w4m"] Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.119633 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.136040 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone99e9-account-delete-v9w4m"] Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.241922 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2229a75d-96c6-4bd6-8a1f-c40b104bb174-operator-scripts\") pod \"keystone99e9-account-delete-v9w4m\" (UID: \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\") " pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.242268 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wnm8k\" (UniqueName: \"kubernetes.io/projected/2229a75d-96c6-4bd6-8a1f-c40b104bb174-kube-api-access-wnm8k\") pod \"keystone99e9-account-delete-v9w4m\" (UID: \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\") " pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.343700 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2229a75d-96c6-4bd6-8a1f-c40b104bb174-operator-scripts\") pod \"keystone99e9-account-delete-v9w4m\" (UID: \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\") " pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.343791 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wnm8k\" (UniqueName: \"kubernetes.io/projected/2229a75d-96c6-4bd6-8a1f-c40b104bb174-kube-api-access-wnm8k\") pod \"keystone99e9-account-delete-v9w4m\" (UID: \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\") " pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.344531 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2229a75d-96c6-4bd6-8a1f-c40b104bb174-operator-scripts\") pod \"keystone99e9-account-delete-v9w4m\" (UID: \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\") " pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:22 crc kubenswrapper[4702]: 
I1125 10:55:22.367306 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wnm8k\" (UniqueName: \"kubernetes.io/projected/2229a75d-96c6-4bd6-8a1f-c40b104bb174-kube-api-access-wnm8k\") pod \"keystone99e9-account-delete-v9w4m\" (UID: \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\") " pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.448735 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.644304 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone99e9-account-delete-v9w4m"] Nov 25 10:55:22 crc kubenswrapper[4702]: I1125 10:55:22.925794 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" event={"ID":"2229a75d-96c6-4bd6-8a1f-c40b104bb174","Type":"ContainerStarted","Data":"93c8397d5c76f1b1520cf7f4d8f8b782084109c5b07cf55a2350c5d37d137c92"} Nov 25 10:55:23 crc kubenswrapper[4702]: I1125 10:55:23.411822 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e51c3d93-e832-4a5a-bf13-b512a6585742" path="/var/lib/kubelet/pods/e51c3d93-e832-4a5a-bf13-b512a6585742/volumes" Nov 25 10:55:23 crc kubenswrapper[4702]: I1125 10:55:23.412355 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ffb83fd7-f533-4feb-bf57-d139d8d1b550" path="/var/lib/kubelet/pods/ffb83fd7-f533-4feb-bf57-d139d8d1b550/volumes" Nov 25 10:55:23 crc kubenswrapper[4702]: I1125 10:55:23.935222 4702 generic.go:334] "Generic (PLEG): container finished" podID="2229a75d-96c6-4bd6-8a1f-c40b104bb174" containerID="d7256210ff50e62235f6d651064e6e58c732176074fd0f20feaa2293fdef71d7" exitCode=0 Nov 25 10:55:23 crc kubenswrapper[4702]: I1125 10:55:23.935563 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" event={"ID":"2229a75d-96c6-4bd6-8a1f-c40b104bb174","Type":"ContainerDied","Data":"d7256210ff50e62235f6d651064e6e58c732176074fd0f20feaa2293fdef71d7"} Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.203019 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.390608 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2229a75d-96c6-4bd6-8a1f-c40b104bb174-operator-scripts\") pod \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\" (UID: \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\") " Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.390862 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wnm8k\" (UniqueName: \"kubernetes.io/projected/2229a75d-96c6-4bd6-8a1f-c40b104bb174-kube-api-access-wnm8k\") pod \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\" (UID: \"2229a75d-96c6-4bd6-8a1f-c40b104bb174\") " Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.391380 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2229a75d-96c6-4bd6-8a1f-c40b104bb174-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2229a75d-96c6-4bd6-8a1f-c40b104bb174" (UID: "2229a75d-96c6-4bd6-8a1f-c40b104bb174"). InnerVolumeSpecName "operator-scripts". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.397872 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2229a75d-96c6-4bd6-8a1f-c40b104bb174-kube-api-access-wnm8k" (OuterVolumeSpecName: "kube-api-access-wnm8k") pod "2229a75d-96c6-4bd6-8a1f-c40b104bb174" (UID: "2229a75d-96c6-4bd6-8a1f-c40b104bb174"). InnerVolumeSpecName "kube-api-access-wnm8k". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.493186 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2229a75d-96c6-4bd6-8a1f-c40b104bb174-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.493225 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wnm8k\" (UniqueName: \"kubernetes.io/projected/2229a75d-96c6-4bd6-8a1f-c40b104bb174-kube-api-access-wnm8k\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.956670 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.956649 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone99e9-account-delete-v9w4m" event={"ID":"2229a75d-96c6-4bd6-8a1f-c40b104bb174","Type":"ContainerDied","Data":"93c8397d5c76f1b1520cf7f4d8f8b782084109c5b07cf55a2350c5d37d137c92"} Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.956786 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="93c8397d5c76f1b1520cf7f4d8f8b782084109c5b07cf55a2350c5d37d137c92" Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.958811 4702 generic.go:334] "Generic (PLEG): container finished" podID="bced984a-f136-4ae9-86a7-179e0a94530b" containerID="e429f00ef0a1a3fc9ef91690f2203dbb399b5a15878e658de753c8426d2f8ba7" exitCode=0 Nov 25 10:55:25 crc kubenswrapper[4702]: I1125 10:55:25.958862 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" event={"ID":"bced984a-f136-4ae9-86a7-179e0a94530b","Type":"ContainerDied","Data":"e429f00ef0a1a3fc9ef91690f2203dbb399b5a15878e658de753c8426d2f8ba7"} Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.087123 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.203969 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-combined-ca-bundle\") pod \"bced984a-f136-4ae9-86a7-179e0a94530b\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.204015 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-public-tls-certs\") pod \"bced984a-f136-4ae9-86a7-179e0a94530b\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.204034 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-scripts\") pod \"bced984a-f136-4ae9-86a7-179e0a94530b\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.204073 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-krbps\" (UniqueName: \"kubernetes.io/projected/bced984a-f136-4ae9-86a7-179e0a94530b-kube-api-access-krbps\") pod \"bced984a-f136-4ae9-86a7-179e0a94530b\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.204088 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-config-data\") pod \"bced984a-f136-4ae9-86a7-179e0a94530b\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.204116 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-credential-keys\") pod \"bced984a-f136-4ae9-86a7-179e0a94530b\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.204180 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-internal-tls-certs\") pod \"bced984a-f136-4ae9-86a7-179e0a94530b\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.204227 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-fernet-keys\") pod \"bced984a-f136-4ae9-86a7-179e0a94530b\" (UID: \"bced984a-f136-4ae9-86a7-179e0a94530b\") " Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.209281 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-scripts" (OuterVolumeSpecName: "scripts") pod "bced984a-f136-4ae9-86a7-179e0a94530b" (UID: "bced984a-f136-4ae9-86a7-179e0a94530b"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.210669 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bced984a-f136-4ae9-86a7-179e0a94530b-kube-api-access-krbps" (OuterVolumeSpecName: "kube-api-access-krbps") pod "bced984a-f136-4ae9-86a7-179e0a94530b" (UID: "bced984a-f136-4ae9-86a7-179e0a94530b"). InnerVolumeSpecName "kube-api-access-krbps". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.212615 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "bced984a-f136-4ae9-86a7-179e0a94530b" (UID: "bced984a-f136-4ae9-86a7-179e0a94530b"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.212644 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "bced984a-f136-4ae9-86a7-179e0a94530b" (UID: "bced984a-f136-4ae9-86a7-179e0a94530b"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.228558 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-config-data" (OuterVolumeSpecName: "config-data") pod "bced984a-f136-4ae9-86a7-179e0a94530b" (UID: "bced984a-f136-4ae9-86a7-179e0a94530b"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.230507 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "bced984a-f136-4ae9-86a7-179e0a94530b" (UID: "bced984a-f136-4ae9-86a7-179e0a94530b"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.243063 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "bced984a-f136-4ae9-86a7-179e0a94530b" (UID: "bced984a-f136-4ae9-86a7-179e0a94530b"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.250099 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "bced984a-f136-4ae9-86a7-179e0a94530b" (UID: "bced984a-f136-4ae9-86a7-179e0a94530b"). InnerVolumeSpecName "internal-tls-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.306751 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.307235 4702 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.307252 4702 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-public-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.307271 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.307289 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-krbps\" (UniqueName: \"kubernetes.io/projected/bced984a-f136-4ae9-86a7-179e0a94530b-kube-api-access-krbps\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.307303 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.307314 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.307328 4702 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/bced984a-f136-4ae9-86a7-179e0a94530b-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.965657 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" event={"ID":"bced984a-f136-4ae9-86a7-179e0a94530b","Type":"ContainerDied","Data":"bb54780a1eb0decdfb0cf53766315715b5a144abf1cdcbb02d6d7af055fc874a"} Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.965708 4702 scope.go:117] "RemoveContainer" containerID="e429f00ef0a1a3fc9ef91690f2203dbb399b5a15878e658de753c8426d2f8ba7" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.965744 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8" Nov 25 10:55:26 crc kubenswrapper[4702]: I1125 10:55:26.991050 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.002250 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-5fb9c4d8d8-9wwc8"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.135001 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-zhk4w"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.142100 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-zhk4w"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.147410 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone99e9-account-delete-v9w4m"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.152714 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone99e9-account-delete-v9w4m"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.157376 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.163293 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-99e9-account-create-update-6lp7d"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.384017 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-m2h94"] Nov 25 10:55:27 crc kubenswrapper[4702]: E1125 10:55:27.384280 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bced984a-f136-4ae9-86a7-179e0a94530b" containerName="keystone-api" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.384293 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="bced984a-f136-4ae9-86a7-179e0a94530b" containerName="keystone-api" Nov 25 10:55:27 crc kubenswrapper[4702]: E1125 10:55:27.384312 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2229a75d-96c6-4bd6-8a1f-c40b104bb174" containerName="mariadb-account-delete" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.384320 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2229a75d-96c6-4bd6-8a1f-c40b104bb174" containerName="mariadb-account-delete" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.384427 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="bced984a-f136-4ae9-86a7-179e0a94530b" containerName="keystone-api" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.384440 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="2229a75d-96c6-4bd6-8a1f-c40b104bb174" containerName="mariadb-account-delete" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.384860 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.392862 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.393638 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.395845 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.398837 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.413812 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2229a75d-96c6-4bd6-8a1f-c40b104bb174" path="/var/lib/kubelet/pods/2229a75d-96c6-4bd6-8a1f-c40b104bb174/volumes" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.414399 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="58f097a1-ec7a-483b-92b2-dd158a54c662" path="/var/lib/kubelet/pods/58f097a1-ec7a-483b-92b2-dd158a54c662/volumes" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.414852 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87226457-4fbe-4677-bff8-edf8e8619c9b" path="/var/lib/kubelet/pods/87226457-4fbe-4677-bff8-edf8e8619c9b/volumes" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.415438 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bced984a-f136-4ae9-86a7-179e0a94530b" path="/var/lib/kubelet/pods/bced984a-f136-4ae9-86a7-179e0a94530b/volumes" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.416627 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-m2h94"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.532014 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9zj9l\" (UniqueName: \"kubernetes.io/projected/88932d47-06cc-4724-aedc-642bc0e5bdfa-kube-api-access-9zj9l\") pod \"keystone-05de-account-create-update-4dq9q\" (UID: \"88932d47-06cc-4724-aedc-642bc0e5bdfa\") " pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.532074 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ded2cbb5-18e5-461f-8078-e9e564b1ed49-operator-scripts\") pod \"keystone-db-create-m2h94\" (UID: \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\") " pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.532104 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jb582\" (UniqueName: \"kubernetes.io/projected/ded2cbb5-18e5-461f-8078-e9e564b1ed49-kube-api-access-jb582\") pod \"keystone-db-create-m2h94\" (UID: \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\") " pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.532120 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88932d47-06cc-4724-aedc-642bc0e5bdfa-operator-scripts\") pod \"keystone-05de-account-create-update-4dq9q\" (UID: \"88932d47-06cc-4724-aedc-642bc0e5bdfa\") " pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.633659 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: 
\"kubernetes.io/configmap/88932d47-06cc-4724-aedc-642bc0e5bdfa-operator-scripts\") pod \"keystone-05de-account-create-update-4dq9q\" (UID: \"88932d47-06cc-4724-aedc-642bc0e5bdfa\") " pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.633768 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9zj9l\" (UniqueName: \"kubernetes.io/projected/88932d47-06cc-4724-aedc-642bc0e5bdfa-kube-api-access-9zj9l\") pod \"keystone-05de-account-create-update-4dq9q\" (UID: \"88932d47-06cc-4724-aedc-642bc0e5bdfa\") " pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.633793 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ded2cbb5-18e5-461f-8078-e9e564b1ed49-operator-scripts\") pod \"keystone-db-create-m2h94\" (UID: \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\") " pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.633816 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jb582\" (UniqueName: \"kubernetes.io/projected/ded2cbb5-18e5-461f-8078-e9e564b1ed49-kube-api-access-jb582\") pod \"keystone-db-create-m2h94\" (UID: \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\") " pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.635482 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ded2cbb5-18e5-461f-8078-e9e564b1ed49-operator-scripts\") pod \"keystone-db-create-m2h94\" (UID: \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\") " pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.635650 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88932d47-06cc-4724-aedc-642bc0e5bdfa-operator-scripts\") pod \"keystone-05de-account-create-update-4dq9q\" (UID: \"88932d47-06cc-4724-aedc-642bc0e5bdfa\") " pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.650920 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9zj9l\" (UniqueName: \"kubernetes.io/projected/88932d47-06cc-4724-aedc-642bc0e5bdfa-kube-api-access-9zj9l\") pod \"keystone-05de-account-create-update-4dq9q\" (UID: \"88932d47-06cc-4724-aedc-642bc0e5bdfa\") " pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.652106 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jb582\" (UniqueName: \"kubernetes.io/projected/ded2cbb5-18e5-461f-8078-e9e564b1ed49-kube-api-access-jb582\") pod \"keystone-db-create-m2h94\" (UID: \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\") " pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.707264 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.713649 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.952867 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q"] Nov 25 10:55:27 crc kubenswrapper[4702]: I1125 10:55:27.972466 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" event={"ID":"88932d47-06cc-4724-aedc-642bc0e5bdfa","Type":"ContainerStarted","Data":"a41e1f28d33b124ffb0a1c366f35470de7a83c9707bd5d185fa1d444c8543b54"} Nov 25 10:55:28 crc kubenswrapper[4702]: I1125 10:55:28.104036 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-m2h94"] Nov 25 10:55:28 crc kubenswrapper[4702]: W1125 10:55:28.104556 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podded2cbb5_18e5_461f_8078_e9e564b1ed49.slice/crio-fe29e15d95202eca701138b6d9ea6d82096312ff25b6ad5c5121d23a6b718a60 WatchSource:0}: Error finding container fe29e15d95202eca701138b6d9ea6d82096312ff25b6ad5c5121d23a6b718a60: Status 404 returned error can't find the container with id fe29e15d95202eca701138b6d9ea6d82096312ff25b6ad5c5121d23a6b718a60 Nov 25 10:55:28 crc kubenswrapper[4702]: I1125 10:55:28.982790 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-m2h94" event={"ID":"ded2cbb5-18e5-461f-8078-e9e564b1ed49","Type":"ContainerStarted","Data":"c339a74f260121e898cf28adac5e3e0d83dfd70a903e82c1cf651cd574e6e7d0"} Nov 25 10:55:28 crc kubenswrapper[4702]: I1125 10:55:28.982852 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-m2h94" event={"ID":"ded2cbb5-18e5-461f-8078-e9e564b1ed49","Type":"ContainerStarted","Data":"fe29e15d95202eca701138b6d9ea6d82096312ff25b6ad5c5121d23a6b718a60"} Nov 25 10:55:28 crc kubenswrapper[4702]: I1125 10:55:28.984723 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" event={"ID":"88932d47-06cc-4724-aedc-642bc0e5bdfa","Type":"ContainerStarted","Data":"836570181d3b203c1e9bdf23df44eceffcd9409da4a20ca978737d29dc351283"} Nov 25 10:55:28 crc kubenswrapper[4702]: I1125 10:55:28.998760 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-create-m2h94" podStartSLOduration=1.998736711 podStartE2EDuration="1.998736711s" podCreationTimestamp="2025-11-25 10:55:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:55:28.995956031 +0000 UTC m=+1426.362551750" watchObservedRunningTime="2025-11-25 10:55:28.998736711 +0000 UTC m=+1426.365332410" Nov 25 10:55:29 crc kubenswrapper[4702]: I1125 10:55:29.009289 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" podStartSLOduration=2.00926999 podStartE2EDuration="2.00926999s" podCreationTimestamp="2025-11-25 10:55:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:55:29.008884889 +0000 UTC m=+1426.375480588" watchObservedRunningTime="2025-11-25 10:55:29.00926999 +0000 UTC m=+1426.375865679" Nov 25 10:55:29 crc kubenswrapper[4702]: I1125 10:55:29.993700 4702 
generic.go:334] "Generic (PLEG): container finished" podID="ded2cbb5-18e5-461f-8078-e9e564b1ed49" containerID="c339a74f260121e898cf28adac5e3e0d83dfd70a903e82c1cf651cd574e6e7d0" exitCode=0 Nov 25 10:55:29 crc kubenswrapper[4702]: I1125 10:55:29.993799 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-m2h94" event={"ID":"ded2cbb5-18e5-461f-8078-e9e564b1ed49","Type":"ContainerDied","Data":"c339a74f260121e898cf28adac5e3e0d83dfd70a903e82c1cf651cd574e6e7d0"} Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.001201 4702 generic.go:334] "Generic (PLEG): container finished" podID="88932d47-06cc-4724-aedc-642bc0e5bdfa" containerID="836570181d3b203c1e9bdf23df44eceffcd9409da4a20ca978737d29dc351283" exitCode=0 Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.001309 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" event={"ID":"88932d47-06cc-4724-aedc-642bc0e5bdfa","Type":"ContainerDied","Data":"836570181d3b203c1e9bdf23df44eceffcd9409da4a20ca978737d29dc351283"} Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.268665 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.382490 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ded2cbb5-18e5-461f-8078-e9e564b1ed49-operator-scripts\") pod \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\" (UID: \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\") " Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.383017 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jb582\" (UniqueName: \"kubernetes.io/projected/ded2cbb5-18e5-461f-8078-e9e564b1ed49-kube-api-access-jb582\") pod \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\" (UID: \"ded2cbb5-18e5-461f-8078-e9e564b1ed49\") " Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.383232 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ded2cbb5-18e5-461f-8078-e9e564b1ed49-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ded2cbb5-18e5-461f-8078-e9e564b1ed49" (UID: "ded2cbb5-18e5-461f-8078-e9e564b1ed49"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.383772 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ded2cbb5-18e5-461f-8078-e9e564b1ed49-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.388788 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ded2cbb5-18e5-461f-8078-e9e564b1ed49-kube-api-access-jb582" (OuterVolumeSpecName: "kube-api-access-jb582") pod "ded2cbb5-18e5-461f-8078-e9e564b1ed49" (UID: "ded2cbb5-18e5-461f-8078-e9e564b1ed49"). InnerVolumeSpecName "kube-api-access-jb582". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:55:31 crc kubenswrapper[4702]: I1125 10:55:31.484533 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jb582\" (UniqueName: \"kubernetes.io/projected/ded2cbb5-18e5-461f-8078-e9e564b1ed49-kube-api-access-jb582\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.008536 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-m2h94" Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.008552 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-m2h94" event={"ID":"ded2cbb5-18e5-461f-8078-e9e564b1ed49","Type":"ContainerDied","Data":"fe29e15d95202eca701138b6d9ea6d82096312ff25b6ad5c5121d23a6b718a60"} Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.010516 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="fe29e15d95202eca701138b6d9ea6d82096312ff25b6ad5c5121d23a6b718a60" Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.263616 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.404794 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88932d47-06cc-4724-aedc-642bc0e5bdfa-operator-scripts\") pod \"88932d47-06cc-4724-aedc-642bc0e5bdfa\" (UID: \"88932d47-06cc-4724-aedc-642bc0e5bdfa\") " Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.404941 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9zj9l\" (UniqueName: \"kubernetes.io/projected/88932d47-06cc-4724-aedc-642bc0e5bdfa-kube-api-access-9zj9l\") pod \"88932d47-06cc-4724-aedc-642bc0e5bdfa\" (UID: \"88932d47-06cc-4724-aedc-642bc0e5bdfa\") " Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.405510 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88932d47-06cc-4724-aedc-642bc0e5bdfa-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "88932d47-06cc-4724-aedc-642bc0e5bdfa" (UID: "88932d47-06cc-4724-aedc-642bc0e5bdfa"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.408880 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88932d47-06cc-4724-aedc-642bc0e5bdfa-kube-api-access-9zj9l" (OuterVolumeSpecName: "kube-api-access-9zj9l") pod "88932d47-06cc-4724-aedc-642bc0e5bdfa" (UID: "88932d47-06cc-4724-aedc-642bc0e5bdfa"). InnerVolumeSpecName "kube-api-access-9zj9l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.507232 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88932d47-06cc-4724-aedc-642bc0e5bdfa-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:32 crc kubenswrapper[4702]: I1125 10:55:32.507283 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9zj9l\" (UniqueName: \"kubernetes.io/projected/88932d47-06cc-4724-aedc-642bc0e5bdfa-kube-api-access-9zj9l\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:33 crc kubenswrapper[4702]: I1125 10:55:33.015841 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" event={"ID":"88932d47-06cc-4724-aedc-642bc0e5bdfa","Type":"ContainerDied","Data":"a41e1f28d33b124ffb0a1c366f35470de7a83c9707bd5d185fa1d444c8543b54"} Nov 25 10:55:33 crc kubenswrapper[4702]: I1125 10:55:33.015890 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a41e1f28d33b124ffb0a1c366f35470de7a83c9707bd5d185fa1d444c8543b54" Nov 25 10:55:33 crc kubenswrapper[4702]: I1125 10:55:33.015941 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.043445 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-z45zz"] Nov 25 10:55:38 crc kubenswrapper[4702]: E1125 10:55:38.045149 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ded2cbb5-18e5-461f-8078-e9e564b1ed49" containerName="mariadb-database-create" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.045285 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="ded2cbb5-18e5-461f-8078-e9e564b1ed49" containerName="mariadb-database-create" Nov 25 10:55:38 crc kubenswrapper[4702]: E1125 10:55:38.045381 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="88932d47-06cc-4724-aedc-642bc0e5bdfa" containerName="mariadb-account-create-update" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.045455 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="88932d47-06cc-4724-aedc-642bc0e5bdfa" containerName="mariadb-account-create-update" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.045720 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="ded2cbb5-18e5-461f-8078-e9e564b1ed49" containerName="mariadb-database-create" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.045806 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="88932d47-06cc-4724-aedc-642bc0e5bdfa" containerName="mariadb-account-create-update" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.046472 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.048819 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.049549 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-qwcxs" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.049744 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.052936 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.053439 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-z45zz"] Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.178071 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e04bb36c-6187-457b-90f7-134b59b183cc-config-data\") pod \"keystone-db-sync-z45zz\" (UID: \"e04bb36c-6187-457b-90f7-134b59b183cc\") " pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.178574 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmd52\" (UniqueName: \"kubernetes.io/projected/e04bb36c-6187-457b-90f7-134b59b183cc-kube-api-access-zmd52\") pod \"keystone-db-sync-z45zz\" (UID: \"e04bb36c-6187-457b-90f7-134b59b183cc\") " pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.280409 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmd52\" (UniqueName: \"kubernetes.io/projected/e04bb36c-6187-457b-90f7-134b59b183cc-kube-api-access-zmd52\") pod \"keystone-db-sync-z45zz\" (UID: \"e04bb36c-6187-457b-90f7-134b59b183cc\") " pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.280498 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e04bb36c-6187-457b-90f7-134b59b183cc-config-data\") pod \"keystone-db-sync-z45zz\" (UID: \"e04bb36c-6187-457b-90f7-134b59b183cc\") " pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.286442 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e04bb36c-6187-457b-90f7-134b59b183cc-config-data\") pod \"keystone-db-sync-z45zz\" (UID: \"e04bb36c-6187-457b-90f7-134b59b183cc\") " pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.297691 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmd52\" (UniqueName: \"kubernetes.io/projected/e04bb36c-6187-457b-90f7-134b59b183cc-kube-api-access-zmd52\") pod \"keystone-db-sync-z45zz\" (UID: \"e04bb36c-6187-457b-90f7-134b59b183cc\") " pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.368707 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:38 crc kubenswrapper[4702]: I1125 10:55:38.543215 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-z45zz"] Nov 25 10:55:39 crc kubenswrapper[4702]: I1125 10:55:39.066108 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" event={"ID":"e04bb36c-6187-457b-90f7-134b59b183cc","Type":"ContainerStarted","Data":"4e875c298086ffb9ebb3f07e90b9e6dc438dee12acdb58f49c555d9a813972d4"} Nov 25 10:55:39 crc kubenswrapper[4702]: I1125 10:55:39.066431 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" event={"ID":"e04bb36c-6187-457b-90f7-134b59b183cc","Type":"ContainerStarted","Data":"576a85c4992dcff84dd4ab8c894b0cb7aad64e24cf8be5102ed06b9201f8d1ef"} Nov 25 10:55:40 crc kubenswrapper[4702]: I1125 10:55:40.097794 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" podStartSLOduration=2.097777635 podStartE2EDuration="2.097777635s" podCreationTimestamp="2025-11-25 10:55:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:55:40.093393961 +0000 UTC m=+1437.459989660" watchObservedRunningTime="2025-11-25 10:55:40.097777635 +0000 UTC m=+1437.464373324" Nov 25 10:55:41 crc kubenswrapper[4702]: I1125 10:55:41.086975 4702 generic.go:334] "Generic (PLEG): container finished" podID="e04bb36c-6187-457b-90f7-134b59b183cc" containerID="4e875c298086ffb9ebb3f07e90b9e6dc438dee12acdb58f49c555d9a813972d4" exitCode=0 Nov 25 10:55:41 crc kubenswrapper[4702]: I1125 10:55:41.087040 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" event={"ID":"e04bb36c-6187-457b-90f7-134b59b183cc","Type":"ContainerDied","Data":"4e875c298086ffb9ebb3f07e90b9e6dc438dee12acdb58f49c555d9a813972d4"} Nov 25 10:55:42 crc kubenswrapper[4702]: I1125 10:55:42.354311 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:42 crc kubenswrapper[4702]: I1125 10:55:42.545301 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e04bb36c-6187-457b-90f7-134b59b183cc-config-data\") pod \"e04bb36c-6187-457b-90f7-134b59b183cc\" (UID: \"e04bb36c-6187-457b-90f7-134b59b183cc\") " Nov 25 10:55:42 crc kubenswrapper[4702]: I1125 10:55:42.545379 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmd52\" (UniqueName: \"kubernetes.io/projected/e04bb36c-6187-457b-90f7-134b59b183cc-kube-api-access-zmd52\") pod \"e04bb36c-6187-457b-90f7-134b59b183cc\" (UID: \"e04bb36c-6187-457b-90f7-134b59b183cc\") " Nov 25 10:55:42 crc kubenswrapper[4702]: I1125 10:55:42.552020 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e04bb36c-6187-457b-90f7-134b59b183cc-kube-api-access-zmd52" (OuterVolumeSpecName: "kube-api-access-zmd52") pod "e04bb36c-6187-457b-90f7-134b59b183cc" (UID: "e04bb36c-6187-457b-90f7-134b59b183cc"). InnerVolumeSpecName "kube-api-access-zmd52". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:55:42 crc kubenswrapper[4702]: I1125 10:55:42.584518 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e04bb36c-6187-457b-90f7-134b59b183cc-config-data" (OuterVolumeSpecName: "config-data") pod "e04bb36c-6187-457b-90f7-134b59b183cc" (UID: "e04bb36c-6187-457b-90f7-134b59b183cc"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:55:42 crc kubenswrapper[4702]: I1125 10:55:42.646706 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/e04bb36c-6187-457b-90f7-134b59b183cc-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:42 crc kubenswrapper[4702]: I1125 10:55:42.646771 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmd52\" (UniqueName: \"kubernetes.io/projected/e04bb36c-6187-457b-90f7-134b59b183cc-kube-api-access-zmd52\") on node \"crc\" DevicePath \"\"" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.102231 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" event={"ID":"e04bb36c-6187-457b-90f7-134b59b183cc","Type":"ContainerDied","Data":"576a85c4992dcff84dd4ab8c894b0cb7aad64e24cf8be5102ed06b9201f8d1ef"} Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.102286 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="576a85c4992dcff84dd4ab8c894b0cb7aad64e24cf8be5102ed06b9201f8d1ef" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.102374 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-z45zz" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.268974 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-zrxkl"] Nov 25 10:55:43 crc kubenswrapper[4702]: E1125 10:55:43.269288 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e04bb36c-6187-457b-90f7-134b59b183cc" containerName="keystone-db-sync" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.269309 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e04bb36c-6187-457b-90f7-134b59b183cc" containerName="keystone-db-sync" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.269478 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="e04bb36c-6187-457b-90f7-134b59b183cc" containerName="keystone-db-sync" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.270014 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.273199 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.273508 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.273730 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.273944 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.274411 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-qwcxs" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.286568 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-zrxkl"] Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.457810 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-scripts\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.457874 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-fernet-keys\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.457955 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-credential-keys\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.457978 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-config-data\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.458063 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dskwx\" (UniqueName: \"kubernetes.io/projected/85fe0fd2-f262-4538-a742-e83388292737-kube-api-access-dskwx\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.558936 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-fernet-keys\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" Nov 25 10:55:43 crc 
kubenswrapper[4702]: I1125 10:55:43.559007 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-credential-keys\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.559046 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-config-data\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.559104 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dskwx\" (UniqueName: \"kubernetes.io/projected/85fe0fd2-f262-4538-a742-e83388292737-kube-api-access-dskwx\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.559142 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-scripts\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.560495 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.560801 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.561152 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.573701 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-scripts\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.574032 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-config-data\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.574227 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-fernet-keys\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.574803 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dskwx\" (UniqueName: \"kubernetes.io/projected/85fe0fd2-f262-4538-a742-e83388292737-kube-api-access-dskwx\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.575476 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-credential-keys\") pod \"keystone-bootstrap-zrxkl\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") " pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.586741 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-qwcxs"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.591231 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.591284 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.591322 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.591871 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"272d347b9d4642895dedc11ba1aec7becf7520474f3145b69f14b52ec045a606"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.591950 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://272d347b9d4642895dedc11ba1aec7becf7520474f3145b69f14b52ec045a606" gracePeriod=600
Nov 25 10:55:43 crc kubenswrapper[4702]: I1125 10:55:43.595104 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:44 crc kubenswrapper[4702]: I1125 10:55:44.099140 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-zrxkl"]
Nov 25 10:55:44 crc kubenswrapper[4702]: W1125 10:55:44.103247 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod85fe0fd2_f262_4538_a742_e83388292737.slice/crio-00e675e94c52d412cd4ac724dce558ff01d3c2793f56a02bcfb299494bc187bf WatchSource:0}: Error finding container 00e675e94c52d412cd4ac724dce558ff01d3c2793f56a02bcfb299494bc187bf: Status 404 returned error can't find the container with id 00e675e94c52d412cd4ac724dce558ff01d3c2793f56a02bcfb299494bc187bf
Nov 25 10:55:44 crc kubenswrapper[4702]: I1125 10:55:44.108421 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret"
Nov 25 10:55:44 crc kubenswrapper[4702]: I1125 10:55:44.122849 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="272d347b9d4642895dedc11ba1aec7becf7520474f3145b69f14b52ec045a606" exitCode=0
Nov 25 10:55:44 crc kubenswrapper[4702]: I1125 10:55:44.122951 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"272d347b9d4642895dedc11ba1aec7becf7520474f3145b69f14b52ec045a606"}
Nov 25 10:55:44 crc kubenswrapper[4702]: I1125 10:55:44.123052 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659"}
Nov 25 10:55:44 crc kubenswrapper[4702]: I1125 10:55:44.123078 4702 scope.go:117] "RemoveContainer" containerID="c995a3a58802015484aaf059ef2d7a1f54e8b9c0222aaf2fd6574984d2674473"
Nov 25 10:55:45 crc kubenswrapper[4702]: I1125 10:55:45.132673 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" event={"ID":"85fe0fd2-f262-4538-a742-e83388292737","Type":"ContainerStarted","Data":"3ad22f60c132479ae514676fafd59a0fd968eab76e66cd462427c70426704f8c"}
Nov 25 10:55:45 crc kubenswrapper[4702]: I1125 10:55:45.134852 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" event={"ID":"85fe0fd2-f262-4538-a742-e83388292737","Type":"ContainerStarted","Data":"00e675e94c52d412cd4ac724dce558ff01d3c2793f56a02bcfb299494bc187bf"}
Nov 25 10:55:45 crc kubenswrapper[4702]: I1125 10:55:45.155874 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" podStartSLOduration=2.155858788 podStartE2EDuration="2.155858788s" podCreationTimestamp="2025-11-25 10:55:43 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:55:45.151934416 +0000 UTC m=+1442.518530115" watchObservedRunningTime="2025-11-25 10:55:45.155858788 +0000 UTC m=+1442.522454477"
Nov 25 10:55:47 crc kubenswrapper[4702]: I1125 10:55:47.147755 4702 generic.go:334] "Generic (PLEG): container finished" podID="85fe0fd2-f262-4538-a742-e83388292737" containerID="3ad22f60c132479ae514676fafd59a0fd968eab76e66cd462427c70426704f8c" exitCode=0
Nov 25 10:55:47 crc kubenswrapper[4702]: I1125 10:55:47.147828 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" event={"ID":"85fe0fd2-f262-4538-a742-e83388292737","Type":"ContainerDied","Data":"3ad22f60c132479ae514676fafd59a0fd968eab76e66cd462427c70426704f8c"}
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.416175 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.421689 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-config-data\") pod \"85fe0fd2-f262-4538-a742-e83388292737\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") "
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.421732 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-fernet-keys\") pod \"85fe0fd2-f262-4538-a742-e83388292737\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") "
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.421791 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-credential-keys\") pod \"85fe0fd2-f262-4538-a742-e83388292737\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") "
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.421824 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dskwx\" (UniqueName: \"kubernetes.io/projected/85fe0fd2-f262-4538-a742-e83388292737-kube-api-access-dskwx\") pod \"85fe0fd2-f262-4538-a742-e83388292737\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") "
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.429063 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "85fe0fd2-f262-4538-a742-e83388292737" (UID: "85fe0fd2-f262-4538-a742-e83388292737"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.429784 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85fe0fd2-f262-4538-a742-e83388292737-kube-api-access-dskwx" (OuterVolumeSpecName: "kube-api-access-dskwx") pod "85fe0fd2-f262-4538-a742-e83388292737" (UID: "85fe0fd2-f262-4538-a742-e83388292737"). InnerVolumeSpecName "kube-api-access-dskwx". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.430105 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "85fe0fd2-f262-4538-a742-e83388292737" (UID: "85fe0fd2-f262-4538-a742-e83388292737"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.449091 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-config-data" (OuterVolumeSpecName: "config-data") pod "85fe0fd2-f262-4538-a742-e83388292737" (UID: "85fe0fd2-f262-4538-a742-e83388292737"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.522774 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-scripts\") pod \"85fe0fd2-f262-4538-a742-e83388292737\" (UID: \"85fe0fd2-f262-4538-a742-e83388292737\") "
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.523210 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-config-data\") on node \"crc\" DevicePath \"\""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.523231 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-fernet-keys\") on node \"crc\" DevicePath \"\""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.523242 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-credential-keys\") on node \"crc\" DevicePath \"\""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.523255 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dskwx\" (UniqueName: \"kubernetes.io/projected/85fe0fd2-f262-4538-a742-e83388292737-kube-api-access-dskwx\") on node \"crc\" DevicePath \"\""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.525362 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-scripts" (OuterVolumeSpecName: "scripts") pod "85fe0fd2-f262-4538-a742-e83388292737" (UID: "85fe0fd2-f262-4538-a742-e83388292737"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:55:48 crc kubenswrapper[4702]: I1125 10:55:48.624172 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/85fe0fd2-f262-4538-a742-e83388292737-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.165351 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl" event={"ID":"85fe0fd2-f262-4538-a742-e83388292737","Type":"ContainerDied","Data":"00e675e94c52d412cd4ac724dce558ff01d3c2793f56a02bcfb299494bc187bf"}
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.165393 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="00e675e94c52d412cd4ac724dce558ff01d3c2793f56a02bcfb299494bc187bf"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.165428 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-zrxkl"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.236264 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"]
Nov 25 10:55:49 crc kubenswrapper[4702]: E1125 10:55:49.236918 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85fe0fd2-f262-4538-a742-e83388292737" containerName="keystone-bootstrap"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.236936 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="85fe0fd2-f262-4538-a742-e83388292737" containerName="keystone-bootstrap"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.237098 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="85fe0fd2-f262-4538-a742-e83388292737" containerName="keystone-bootstrap"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.237671 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.239590 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-qwcxs"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.239928 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.240146 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.240965 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.249922 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"]
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.436389 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-credential-keys\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.436449 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-scripts\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.436474 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6qsvf\" (UniqueName: \"kubernetes.io/projected/b20f86c7-2b46-4295-b17d-3e2afba66b05-kube-api-access-6qsvf\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.436612 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-config-data\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.436689 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-fernet-keys\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.537881 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-fernet-keys\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.538020 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-credential-keys\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.538045 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-scripts\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.538067 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6qsvf\" (UniqueName: \"kubernetes.io/projected/b20f86c7-2b46-4295-b17d-3e2afba66b05-kube-api-access-6qsvf\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.538089 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-config-data\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.542606 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-scripts\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.542795 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-credential-keys\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.543280 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-config-data\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.544055 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-fernet-keys\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.557218 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6qsvf\" (UniqueName: \"kubernetes.io/projected/b20f86c7-2b46-4295-b17d-3e2afba66b05-kube-api-access-6qsvf\") pod \"keystone-57ccd7546d-qk4vk\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") " pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:49 crc kubenswrapper[4702]: I1125 10:55:49.853549 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:50 crc kubenswrapper[4702]: I1125 10:55:50.243423 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"]
Nov 25 10:55:51 crc kubenswrapper[4702]: I1125 10:55:51.187963 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk" event={"ID":"b20f86c7-2b46-4295-b17d-3e2afba66b05","Type":"ContainerStarted","Data":"8b0436c0484380f7e696204bb8a580915ece15dc8349a5cfa858468e7bb702ea"}
Nov 25 10:55:51 crc kubenswrapper[4702]: I1125 10:55:51.188357 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk" event={"ID":"b20f86c7-2b46-4295-b17d-3e2afba66b05","Type":"ContainerStarted","Data":"1903a6cf032c29995d950c22233e1ada2bf0846aba752701585a523984bbfc3c"}
Nov 25 10:55:51 crc kubenswrapper[4702]: I1125 10:55:51.188380 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:55:51 crc kubenswrapper[4702]: I1125 10:55:51.207511 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk" podStartSLOduration=2.207495142 podStartE2EDuration="2.207495142s" podCreationTimestamp="2025-11-25 10:55:49 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:55:51.204120186 +0000 UTC m=+1448.570715875" watchObservedRunningTime="2025-11-25 10:55:51.207495142 +0000 UTC m=+1448.574090831"
Nov 25 10:56:21 crc kubenswrapper[4702]: I1125 10:56:21.350786 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.350274 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-z45zz"]
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.353288 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-zrxkl"]
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.359353 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-z45zz"]
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.364686 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-zrxkl"]
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.372578 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"]
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.372836 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk" podUID="b20f86c7-2b46-4295-b17d-3e2afba66b05" containerName="keystone-api" containerID="cri-o://8b0436c0484380f7e696204bb8a580915ece15dc8349a5cfa858468e7bb702ea" gracePeriod=30
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.402480 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone05de-account-delete-kkd5z"]
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.403781 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.414151 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone05de-account-delete-kkd5z"]
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.578989 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4xjtz\" (UniqueName: \"kubernetes.io/projected/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-kube-api-access-4xjtz\") pod \"keystone05de-account-delete-kkd5z\" (UID: \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\") " pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.579088 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-operator-scripts\") pod \"keystone05de-account-delete-kkd5z\" (UID: \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\") " pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.680809 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-operator-scripts\") pod \"keystone05de-account-delete-kkd5z\" (UID: \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\") " pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.680963 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4xjtz\" (UniqueName: \"kubernetes.io/projected/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-kube-api-access-4xjtz\") pod \"keystone05de-account-delete-kkd5z\" (UID: \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\") " pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.681718 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-operator-scripts\") pod \"keystone05de-account-delete-kkd5z\" (UID: \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\") " pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.701549 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4xjtz\" (UniqueName: \"kubernetes.io/projected/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-kube-api-access-4xjtz\") pod \"keystone05de-account-delete-kkd5z\" (UID: \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\") " pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.727279 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:36 crc kubenswrapper[4702]: I1125 10:56:36.926656 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone05de-account-delete-kkd5z"]
Nov 25 10:56:37 crc kubenswrapper[4702]: I1125 10:56:37.415167 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85fe0fd2-f262-4538-a742-e83388292737" path="/var/lib/kubelet/pods/85fe0fd2-f262-4538-a742-e83388292737/volumes"
Nov 25 10:56:37 crc kubenswrapper[4702]: I1125 10:56:37.416091 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e04bb36c-6187-457b-90f7-134b59b183cc" path="/var/lib/kubelet/pods/e04bb36c-6187-457b-90f7-134b59b183cc/volumes"
Nov 25 10:56:37 crc kubenswrapper[4702]: I1125 10:56:37.779944 4702 generic.go:334] "Generic (PLEG): container finished" podID="1c118cd4-30c3-4772-b1ca-c5d3a57f1c35" containerID="4e138d7ee57296d1aaf2529a2e690781275dcdcf3d3b187f22ee5617e4023e81" exitCode=0
Nov 25 10:56:37 crc kubenswrapper[4702]: I1125 10:56:37.780010 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z" event={"ID":"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35","Type":"ContainerDied","Data":"4e138d7ee57296d1aaf2529a2e690781275dcdcf3d3b187f22ee5617e4023e81"}
Nov 25 10:56:37 crc kubenswrapper[4702]: I1125 10:56:37.780078 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z" event={"ID":"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35","Type":"ContainerStarted","Data":"f372b113316fbdbe26aea50edd34b90d6eb554fd56a6f01ece434aa6e344c5c6"}
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.076368 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.217213 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-operator-scripts\") pod \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\" (UID: \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\") "
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.217278 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4xjtz\" (UniqueName: \"kubernetes.io/projected/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-kube-api-access-4xjtz\") pod \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\" (UID: \"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35\") "
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.218022 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "1c118cd4-30c3-4772-b1ca-c5d3a57f1c35" (UID: "1c118cd4-30c3-4772-b1ca-c5d3a57f1c35"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.222246 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-kube-api-access-4xjtz" (OuterVolumeSpecName: "kube-api-access-4xjtz") pod "1c118cd4-30c3-4772-b1ca-c5d3a57f1c35" (UID: "1c118cd4-30c3-4772-b1ca-c5d3a57f1c35"). InnerVolumeSpecName "kube-api-access-4xjtz". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.318794 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.318833 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4xjtz\" (UniqueName: \"kubernetes.io/projected/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35-kube-api-access-4xjtz\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.795545 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z" event={"ID":"1c118cd4-30c3-4772-b1ca-c5d3a57f1c35","Type":"ContainerDied","Data":"f372b113316fbdbe26aea50edd34b90d6eb554fd56a6f01ece434aa6e344c5c6"}
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.795587 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f372b113316fbdbe26aea50edd34b90d6eb554fd56a6f01ece434aa6e344c5c6"
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.795583 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone05de-account-delete-kkd5z"
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.797546 4702 generic.go:334] "Generic (PLEG): container finished" podID="b20f86c7-2b46-4295-b17d-3e2afba66b05" containerID="8b0436c0484380f7e696204bb8a580915ece15dc8349a5cfa858468e7bb702ea" exitCode=0
Nov 25 10:56:39 crc kubenswrapper[4702]: I1125 10:56:39.797604 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk" event={"ID":"b20f86c7-2b46-4295-b17d-3e2afba66b05","Type":"ContainerDied","Data":"8b0436c0484380f7e696204bb8a580915ece15dc8349a5cfa858468e7bb702ea"}
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.345786 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.535549 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6qsvf\" (UniqueName: \"kubernetes.io/projected/b20f86c7-2b46-4295-b17d-3e2afba66b05-kube-api-access-6qsvf\") pod \"b20f86c7-2b46-4295-b17d-3e2afba66b05\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") "
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.535620 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-credential-keys\") pod \"b20f86c7-2b46-4295-b17d-3e2afba66b05\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") "
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.535672 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-config-data\") pod \"b20f86c7-2b46-4295-b17d-3e2afba66b05\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") "
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.535719 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-scripts\") pod \"b20f86c7-2b46-4295-b17d-3e2afba66b05\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") "
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.535749 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-fernet-keys\") pod \"b20f86c7-2b46-4295-b17d-3e2afba66b05\" (UID: \"b20f86c7-2b46-4295-b17d-3e2afba66b05\") "
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.539625 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b20f86c7-2b46-4295-b17d-3e2afba66b05-kube-api-access-6qsvf" (OuterVolumeSpecName: "kube-api-access-6qsvf") pod "b20f86c7-2b46-4295-b17d-3e2afba66b05" (UID: "b20f86c7-2b46-4295-b17d-3e2afba66b05"). InnerVolumeSpecName "kube-api-access-6qsvf". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.540220 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "b20f86c7-2b46-4295-b17d-3e2afba66b05" (UID: "b20f86c7-2b46-4295-b17d-3e2afba66b05"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.540896 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-scripts" (OuterVolumeSpecName: "scripts") pod "b20f86c7-2b46-4295-b17d-3e2afba66b05" (UID: "b20f86c7-2b46-4295-b17d-3e2afba66b05"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.541652 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "b20f86c7-2b46-4295-b17d-3e2afba66b05" (UID: "b20f86c7-2b46-4295-b17d-3e2afba66b05"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.558072 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-config-data" (OuterVolumeSpecName: "config-data") pod "b20f86c7-2b46-4295-b17d-3e2afba66b05" (UID: "b20f86c7-2b46-4295-b17d-3e2afba66b05"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.637818 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6qsvf\" (UniqueName: \"kubernetes.io/projected/b20f86c7-2b46-4295-b17d-3e2afba66b05-kube-api-access-6qsvf\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.637851 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-credential-keys\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.637862 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-config-data\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.637871 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.637880 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/b20f86c7-2b46-4295-b17d-3e2afba66b05-fernet-keys\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.806035 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk" event={"ID":"b20f86c7-2b46-4295-b17d-3e2afba66b05","Type":"ContainerDied","Data":"1903a6cf032c29995d950c22233e1ada2bf0846aba752701585a523984bbfc3c"}
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.806101 4702 scope.go:117] "RemoveContainer" containerID="8b0436c0484380f7e696204bb8a580915ece15dc8349a5cfa858468e7bb702ea"
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.806106 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.846738 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"]
Nov 25 10:56:40 crc kubenswrapper[4702]: I1125 10:56:40.851609 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-57ccd7546d-qk4vk"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.410808 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b20f86c7-2b46-4295-b17d-3e2afba66b05" path="/var/lib/kubelet/pods/b20f86c7-2b46-4295-b17d-3e2afba66b05/volumes"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.432848 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone05de-account-delete-kkd5z"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.441052 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.445965 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-m2h94"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.450738 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone05de-account-delete-kkd5z"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.455468 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-m2h94"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.459088 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-05de-account-create-update-4dq9q"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.672573 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-create-49kqw"]
Nov 25 10:56:41 crc kubenswrapper[4702]: E1125 10:56:41.672872 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1c118cd4-30c3-4772-b1ca-c5d3a57f1c35" containerName="mariadb-account-delete"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.672892 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="1c118cd4-30c3-4772-b1ca-c5d3a57f1c35" containerName="mariadb-account-delete"
Nov 25 10:56:41 crc kubenswrapper[4702]: E1125 10:56:41.672929 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b20f86c7-2b46-4295-b17d-3e2afba66b05" containerName="keystone-api"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.672939 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="b20f86c7-2b46-4295-b17d-3e2afba66b05" containerName="keystone-api"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.673102 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="b20f86c7-2b46-4295-b17d-3e2afba66b05" containerName="keystone-api"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.673134 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="1c118cd4-30c3-4772-b1ca-c5d3a57f1c35" containerName="mariadb-account-delete"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.673609 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.685815 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.687011 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.689740 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-db-secret"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.695338 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-49kqw"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.702344 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"]
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.856566 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6xxr\" (UniqueName: \"kubernetes.io/projected/a18904e0-0c8c-4dc4-b018-6edaed969779-kube-api-access-g6xxr\") pod \"keystone-db-create-49kqw\" (UID: \"a18904e0-0c8c-4dc4-b018-6edaed969779\") " pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.857040 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75008377-9dc3-4121-8794-e51ad732a7ca-operator-scripts\") pod \"keystone-2de6-account-create-update-hxmqj\" (UID: \"75008377-9dc3-4121-8794-e51ad732a7ca\") " pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.857225 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hmrr9\" (UniqueName: \"kubernetes.io/projected/75008377-9dc3-4121-8794-e51ad732a7ca-kube-api-access-hmrr9\") pod \"keystone-2de6-account-create-update-hxmqj\" (UID: \"75008377-9dc3-4121-8794-e51ad732a7ca\") " pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.857446 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18904e0-0c8c-4dc4-b018-6edaed969779-operator-scripts\") pod \"keystone-db-create-49kqw\" (UID: \"a18904e0-0c8c-4dc4-b018-6edaed969779\") " pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.959114 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75008377-9dc3-4121-8794-e51ad732a7ca-operator-scripts\") pod \"keystone-2de6-account-create-update-hxmqj\" (UID: \"75008377-9dc3-4121-8794-e51ad732a7ca\") " pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.959196 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hmrr9\" (UniqueName: \"kubernetes.io/projected/75008377-9dc3-4121-8794-e51ad732a7ca-kube-api-access-hmrr9\") pod \"keystone-2de6-account-create-update-hxmqj\" (UID: \"75008377-9dc3-4121-8794-e51ad732a7ca\") " pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.959256 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18904e0-0c8c-4dc4-b018-6edaed969779-operator-scripts\") pod \"keystone-db-create-49kqw\" (UID: \"a18904e0-0c8c-4dc4-b018-6edaed969779\") " pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.959287 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6xxr\" (UniqueName: \"kubernetes.io/projected/a18904e0-0c8c-4dc4-b018-6edaed969779-kube-api-access-g6xxr\") pod \"keystone-db-create-49kqw\" (UID: \"a18904e0-0c8c-4dc4-b018-6edaed969779\") " pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.960262 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75008377-9dc3-4121-8794-e51ad732a7ca-operator-scripts\") pod \"keystone-2de6-account-create-update-hxmqj\" (UID: \"75008377-9dc3-4121-8794-e51ad732a7ca\") " pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.960428 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18904e0-0c8c-4dc4-b018-6edaed969779-operator-scripts\") pod \"keystone-db-create-49kqw\" (UID: \"a18904e0-0c8c-4dc4-b018-6edaed969779\") " pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.979958 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hmrr9\" (UniqueName: \"kubernetes.io/projected/75008377-9dc3-4121-8794-e51ad732a7ca-kube-api-access-hmrr9\") pod \"keystone-2de6-account-create-update-hxmqj\" (UID: \"75008377-9dc3-4121-8794-e51ad732a7ca\") " pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.980456 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6xxr\" (UniqueName: \"kubernetes.io/projected/a18904e0-0c8c-4dc4-b018-6edaed969779-kube-api-access-g6xxr\") pod \"keystone-db-create-49kqw\" (UID: \"a18904e0-0c8c-4dc4-b018-6edaed969779\") " pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:41 crc kubenswrapper[4702]: I1125 10:56:41.993758 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.004421 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.459118 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"]
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.497823 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-49kqw"]
Nov 25 10:56:42 crc kubenswrapper[4702]: W1125 10:56:42.505501 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda18904e0_0c8c_4dc4_b018_6edaed969779.slice/crio-a943d56ebedcfd03cf25e956edfb1fdd089cd63cac364b32750987792f82819b WatchSource:0}: Error finding container a943d56ebedcfd03cf25e956edfb1fdd089cd63cac364b32750987792f82819b: Status 404 returned error can't find the container with id a943d56ebedcfd03cf25e956edfb1fdd089cd63cac364b32750987792f82819b
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.844710 4702 generic.go:334] "Generic (PLEG): container finished" podID="a18904e0-0c8c-4dc4-b018-6edaed969779" containerID="4562ccc282fea249e6526624cbde8b21303cfe47a746b8fd52535047200d259e" exitCode=0
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.844887 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-49kqw" event={"ID":"a18904e0-0c8c-4dc4-b018-6edaed969779","Type":"ContainerDied","Data":"4562ccc282fea249e6526624cbde8b21303cfe47a746b8fd52535047200d259e"}
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.845061 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-49kqw" event={"ID":"a18904e0-0c8c-4dc4-b018-6edaed969779","Type":"ContainerStarted","Data":"a943d56ebedcfd03cf25e956edfb1fdd089cd63cac364b32750987792f82819b"}
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.846571 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj" event={"ID":"75008377-9dc3-4121-8794-e51ad732a7ca","Type":"ContainerStarted","Data":"99c2bf1f80796866704cf7a2c6ec2eddee465827c5b4b17e1ca59501791a4b11"}
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.846604 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj" event={"ID":"75008377-9dc3-4121-8794-e51ad732a7ca","Type":"ContainerStarted","Data":"f65eef124a1f37f7c180d15a8ca4a5607918ae4daa87b2902c9da3c75f6d0fbc"}
Nov 25 10:56:42 crc kubenswrapper[4702]: I1125 10:56:42.880738 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj" podStartSLOduration=1.880719573 podStartE2EDuration="1.880719573s" podCreationTimestamp="2025-11-25 10:56:41 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:56:42.877532152 +0000 UTC m=+1500.244127851" watchObservedRunningTime="2025-11-25 10:56:42.880719573 +0000 UTC m=+1500.247315262"
Nov 25 10:56:43 crc kubenswrapper[4702]: I1125 10:56:43.419055 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1c118cd4-30c3-4772-b1ca-c5d3a57f1c35" path="/var/lib/kubelet/pods/1c118cd4-30c3-4772-b1ca-c5d3a57f1c35/volumes"
Nov 25 10:56:43 crc kubenswrapper[4702]: I1125 10:56:43.419590 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88932d47-06cc-4724-aedc-642bc0e5bdfa" path="/var/lib/kubelet/pods/88932d47-06cc-4724-aedc-642bc0e5bdfa/volumes"
Nov 25 10:56:43 crc kubenswrapper[4702]: I1125 10:56:43.420139 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ded2cbb5-18e5-461f-8078-e9e564b1ed49" path="/var/lib/kubelet/pods/ded2cbb5-18e5-461f-8078-e9e564b1ed49/volumes"
Nov 25 10:56:43 crc kubenswrapper[4702]: I1125 10:56:43.855120 4702 generic.go:334] "Generic (PLEG): container finished" podID="75008377-9dc3-4121-8794-e51ad732a7ca" containerID="99c2bf1f80796866704cf7a2c6ec2eddee465827c5b4b17e1ca59501791a4b11" exitCode=0
Nov 25 10:56:43 crc kubenswrapper[4702]: I1125 10:56:43.855475 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj" event={"ID":"75008377-9dc3-4121-8794-e51ad732a7ca","Type":"ContainerDied","Data":"99c2bf1f80796866704cf7a2c6ec2eddee465827c5b4b17e1ca59501791a4b11"}
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.086758 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.189889 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18904e0-0c8c-4dc4-b018-6edaed969779-operator-scripts\") pod \"a18904e0-0c8c-4dc4-b018-6edaed969779\" (UID: \"a18904e0-0c8c-4dc4-b018-6edaed969779\") "
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.190316 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6xxr\" (UniqueName: \"kubernetes.io/projected/a18904e0-0c8c-4dc4-b018-6edaed969779-kube-api-access-g6xxr\") pod \"a18904e0-0c8c-4dc4-b018-6edaed969779\" (UID: \"a18904e0-0c8c-4dc4-b018-6edaed969779\") "
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.190739 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a18904e0-0c8c-4dc4-b018-6edaed969779-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a18904e0-0c8c-4dc4-b018-6edaed969779" (UID: "a18904e0-0c8c-4dc4-b018-6edaed969779"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.200016 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a18904e0-0c8c-4dc4-b018-6edaed969779-kube-api-access-g6xxr" (OuterVolumeSpecName: "kube-api-access-g6xxr") pod "a18904e0-0c8c-4dc4-b018-6edaed969779" (UID: "a18904e0-0c8c-4dc4-b018-6edaed969779"). InnerVolumeSpecName "kube-api-access-g6xxr". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.292153 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a18904e0-0c8c-4dc4-b018-6edaed969779-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.292198 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6xxr\" (UniqueName: \"kubernetes.io/projected/a18904e0-0c8c-4dc4-b018-6edaed969779-kube-api-access-g6xxr\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.863627 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-create-49kqw" event={"ID":"a18904e0-0c8c-4dc4-b018-6edaed969779","Type":"ContainerDied","Data":"a943d56ebedcfd03cf25e956edfb1fdd089cd63cac364b32750987792f82819b"}
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.865081 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a943d56ebedcfd03cf25e956edfb1fdd089cd63cac364b32750987792f82819b"
Nov 25 10:56:44 crc kubenswrapper[4702]: I1125 10:56:44.863656 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-create-49kqw"
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.133230 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.307210 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75008377-9dc3-4121-8794-e51ad732a7ca-operator-scripts\") pod \"75008377-9dc3-4121-8794-e51ad732a7ca\" (UID: \"75008377-9dc3-4121-8794-e51ad732a7ca\") "
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.307287 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hmrr9\" (UniqueName: \"kubernetes.io/projected/75008377-9dc3-4121-8794-e51ad732a7ca-kube-api-access-hmrr9\") pod \"75008377-9dc3-4121-8794-e51ad732a7ca\" (UID: \"75008377-9dc3-4121-8794-e51ad732a7ca\") "
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.308078 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/75008377-9dc3-4121-8794-e51ad732a7ca-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "75008377-9dc3-4121-8794-e51ad732a7ca" (UID: "75008377-9dc3-4121-8794-e51ad732a7ca"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.314360 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/75008377-9dc3-4121-8794-e51ad732a7ca-kube-api-access-hmrr9" (OuterVolumeSpecName: "kube-api-access-hmrr9") pod "75008377-9dc3-4121-8794-e51ad732a7ca" (UID: "75008377-9dc3-4121-8794-e51ad732a7ca"). InnerVolumeSpecName "kube-api-access-hmrr9". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.408710 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/75008377-9dc3-4121-8794-e51ad732a7ca-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.408755 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hmrr9\" (UniqueName: \"kubernetes.io/projected/75008377-9dc3-4121-8794-e51ad732a7ca-kube-api-access-hmrr9\") on node \"crc\" DevicePath \"\""
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.872126 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj" event={"ID":"75008377-9dc3-4121-8794-e51ad732a7ca","Type":"ContainerDied","Data":"f65eef124a1f37f7c180d15a8ca4a5607918ae4daa87b2902c9da3c75f6d0fbc"}
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.872154 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"
Nov 25 10:56:45 crc kubenswrapper[4702]: I1125 10:56:45.872173 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f65eef124a1f37f7c180d15a8ca4a5607918ae4daa87b2902c9da3c75f6d0fbc"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.130842 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-lwm4x"]
Nov 25 10:56:47 crc kubenswrapper[4702]: E1125 10:56:47.131408 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a18904e0-0c8c-4dc4-b018-6edaed969779" containerName="mariadb-database-create"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.131424 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a18904e0-0c8c-4dc4-b018-6edaed969779" containerName="mariadb-database-create"
Nov 25 10:56:47 crc kubenswrapper[4702]: E1125 10:56:47.131435 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="75008377-9dc3-4121-8794-e51ad732a7ca" containerName="mariadb-account-create-update"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.131441 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="75008377-9dc3-4121-8794-e51ad732a7ca" containerName="mariadb-account-create-update"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.131559 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="75008377-9dc3-4121-8794-e51ad732a7ca" containerName="mariadb-account-create-update"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.131572 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a18904e0-0c8c-4dc4-b018-6edaed969779" containerName="mariadb-database-create"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.131998 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.133773 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.134029 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.134372 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-8w8n8"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.141083 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.141133 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-lwm4x"]
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.232433 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae07277-35e9-408c-ad38-080789945cc7-config-data\") pod \"keystone-db-sync-lwm4x\" (UID: \"8ae07277-35e9-408c-ad38-080789945cc7\") " pod="keystone-kuttl-tests/keystone-db-sync-lwm4x"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.232520 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r7v2b\" (UniqueName: \"kubernetes.io/projected/8ae07277-35e9-408c-ad38-080789945cc7-kube-api-access-r7v2b\") pod \"keystone-db-sync-lwm4x\" (UID: \"8ae07277-35e9-408c-ad38-080789945cc7\") " pod="keystone-kuttl-tests/keystone-db-sync-lwm4x"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.333526 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r7v2b\" (UniqueName: \"kubernetes.io/projected/8ae07277-35e9-408c-ad38-080789945cc7-kube-api-access-r7v2b\") pod \"keystone-db-sync-lwm4x\" (UID: \"8ae07277-35e9-408c-ad38-080789945cc7\") " pod="keystone-kuttl-tests/keystone-db-sync-lwm4x"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.333662 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae07277-35e9-408c-ad38-080789945cc7-config-data\") pod \"keystone-db-sync-lwm4x\" (UID: \"8ae07277-35e9-408c-ad38-080789945cc7\") " pod="keystone-kuttl-tests/keystone-db-sync-lwm4x"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.338382 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae07277-35e9-408c-ad38-080789945cc7-config-data\") pod \"keystone-db-sync-lwm4x\" (UID: \"8ae07277-35e9-408c-ad38-080789945cc7\") " pod="keystone-kuttl-tests/keystone-db-sync-lwm4x"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.351200 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r7v2b\" (UniqueName: \"kubernetes.io/projected/8ae07277-35e9-408c-ad38-080789945cc7-kube-api-access-r7v2b\") pod \"keystone-db-sync-lwm4x\" (UID: \"8ae07277-35e9-408c-ad38-080789945cc7\") " pod="keystone-kuttl-tests/keystone-db-sync-lwm4x"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.454244 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x"
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.860575 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-lwm4x"]
Nov 25 10:56:47 crc kubenswrapper[4702]: W1125 10:56:47.863271 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ae07277_35e9_408c_ad38_080789945cc7.slice/crio-9d09dbe928c61721bbf6cd9e58c7ea881493684d0d36babe0bb4ec87e24cb7f4 WatchSource:0}: Error finding container 9d09dbe928c61721bbf6cd9e58c7ea881493684d0d36babe0bb4ec87e24cb7f4: Status 404 returned error can't find the container with id 9d09dbe928c61721bbf6cd9e58c7ea881493684d0d36babe0bb4ec87e24cb7f4
Nov 25 10:56:47 crc kubenswrapper[4702]: I1125 10:56:47.885049 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x" event={"ID":"8ae07277-35e9-408c-ad38-080789945cc7","Type":"ContainerStarted","Data":"9d09dbe928c61721bbf6cd9e58c7ea881493684d0d36babe0bb4ec87e24cb7f4"}
Nov 25 10:56:48 crc kubenswrapper[4702]: I1125 10:56:48.909839 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x" event={"ID":"8ae07277-35e9-408c-ad38-080789945cc7","Type":"ContainerStarted","Data":"1ab8ef1b3e8a80345048473e2acb198396d893efb47cbd6886b76860ca776277"}
Nov 25 10:56:48 crc kubenswrapper[4702]: I1125 10:56:48.964791 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x" podStartSLOduration=1.964758387 podStartE2EDuration="1.964758387s" podCreationTimestamp="2025-11-25 10:56:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:56:48.959209339 +0000 UTC m=+1506.325805028" watchObservedRunningTime="2025-11-25 10:56:48.964758387 +0000 UTC m=+1506.331354086"
Nov 25 10:56:49 crc kubenswrapper[4702]: I1125 10:56:49.918313 4702 generic.go:334] "Generic (PLEG): container finished" podID="8ae07277-35e9-408c-ad38-080789945cc7" containerID="1ab8ef1b3e8a80345048473e2acb198396d893efb47cbd6886b76860ca776277" exitCode=0
Nov 25 10:56:49 crc kubenswrapper[4702]: I1125 10:56:49.918351 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x" event={"ID":"8ae07277-35e9-408c-ad38-080789945cc7","Type":"ContainerDied","Data":"1ab8ef1b3e8a80345048473e2acb198396d893efb47cbd6886b76860ca776277"}
Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.176980 4702 util.go:48] "No ready sandbox for pod can be found.
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x" Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.286330 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-r7v2b\" (UniqueName: \"kubernetes.io/projected/8ae07277-35e9-408c-ad38-080789945cc7-kube-api-access-r7v2b\") pod \"8ae07277-35e9-408c-ad38-080789945cc7\" (UID: \"8ae07277-35e9-408c-ad38-080789945cc7\") " Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.286420 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae07277-35e9-408c-ad38-080789945cc7-config-data\") pod \"8ae07277-35e9-408c-ad38-080789945cc7\" (UID: \"8ae07277-35e9-408c-ad38-080789945cc7\") " Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.299271 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ae07277-35e9-408c-ad38-080789945cc7-kube-api-access-r7v2b" (OuterVolumeSpecName: "kube-api-access-r7v2b") pod "8ae07277-35e9-408c-ad38-080789945cc7" (UID: "8ae07277-35e9-408c-ad38-080789945cc7"). InnerVolumeSpecName "kube-api-access-r7v2b". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.327443 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ae07277-35e9-408c-ad38-080789945cc7-config-data" (OuterVolumeSpecName: "config-data") pod "8ae07277-35e9-408c-ad38-080789945cc7" (UID: "8ae07277-35e9-408c-ad38-080789945cc7"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.388351 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-r7v2b\" (UniqueName: \"kubernetes.io/projected/8ae07277-35e9-408c-ad38-080789945cc7-kube-api-access-r7v2b\") on node \"crc\" DevicePath \"\"" Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.388396 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ae07277-35e9-408c-ad38-080789945cc7-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.936417 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x" event={"ID":"8ae07277-35e9-408c-ad38-080789945cc7","Type":"ContainerDied","Data":"9d09dbe928c61721bbf6cd9e58c7ea881493684d0d36babe0bb4ec87e24cb7f4"} Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.936670 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9d09dbe928c61721bbf6cd9e58c7ea881493684d0d36babe0bb4ec87e24cb7f4" Nov 25 10:56:51 crc kubenswrapper[4702]: I1125 10:56:51.936500 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-db-sync-lwm4x" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.381563 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-56m29"] Nov 25 10:56:52 crc kubenswrapper[4702]: E1125 10:56:52.382071 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ae07277-35e9-408c-ad38-080789945cc7" containerName="keystone-db-sync" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.382093 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ae07277-35e9-408c-ad38-080789945cc7" containerName="keystone-db-sync" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.382275 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ae07277-35e9-408c-ad38-080789945cc7" containerName="keystone-db-sync" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.382936 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.385926 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"osp-secret" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.387685 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.388334 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.388593 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-8w8n8" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.388863 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.393546 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-56m29"] Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.502466 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vd728\" (UniqueName: \"kubernetes.io/projected/ba8bde50-661d-47cd-baf0-d1f137fe82ba-kube-api-access-vd728\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.502521 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-config-data\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.502574 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-credential-keys\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.502692 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: 
\"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-scripts\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.502716 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-fernet-keys\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.604209 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-scripts\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.604281 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-fernet-keys\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.604401 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vd728\" (UniqueName: \"kubernetes.io/projected/ba8bde50-661d-47cd-baf0-d1f137fe82ba-kube-api-access-vd728\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.604439 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-config-data\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.604487 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-credential-keys\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.608399 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-scripts\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.609743 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-credential-keys\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.611229 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-fernet-keys\") pod 
\"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.614016 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-config-data\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.632121 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vd728\" (UniqueName: \"kubernetes.io/projected/ba8bde50-661d-47cd-baf0-d1f137fe82ba-kube-api-access-vd728\") pod \"keystone-bootstrap-56m29\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:52 crc kubenswrapper[4702]: I1125 10:56:52.705338 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:53 crc kubenswrapper[4702]: I1125 10:56:53.114579 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-56m29"] Nov 25 10:56:53 crc kubenswrapper[4702]: I1125 10:56:53.958170 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" event={"ID":"ba8bde50-661d-47cd-baf0-d1f137fe82ba","Type":"ContainerStarted","Data":"53741271cef622075ecd89f39df860a0400c9bb6d59b6ab51e0a5b65ef07a1b4"} Nov 25 10:56:53 crc kubenswrapper[4702]: I1125 10:56:53.958541 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" event={"ID":"ba8bde50-661d-47cd-baf0-d1f137fe82ba","Type":"ContainerStarted","Data":"50ff1b21c3493837239680d65a6b9856d1b82a92444a6cc9700e4db9a79e6c4b"} Nov 25 10:56:53 crc kubenswrapper[4702]: I1125 10:56:53.977296 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" podStartSLOduration=1.977277765 podStartE2EDuration="1.977277765s" podCreationTimestamp="2025-11-25 10:56:52 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:56:53.974256619 +0000 UTC m=+1511.340852318" watchObservedRunningTime="2025-11-25 10:56:53.977277765 +0000 UTC m=+1511.343873454" Nov 25 10:56:56 crc kubenswrapper[4702]: I1125 10:56:56.978638 4702 generic.go:334] "Generic (PLEG): container finished" podID="ba8bde50-661d-47cd-baf0-d1f137fe82ba" containerID="53741271cef622075ecd89f39df860a0400c9bb6d59b6ab51e0a5b65ef07a1b4" exitCode=0 Nov 25 10:56:56 crc kubenswrapper[4702]: I1125 10:56:56.978713 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" event={"ID":"ba8bde50-661d-47cd-baf0-d1f137fe82ba","Type":"ContainerDied","Data":"53741271cef622075ecd89f39df860a0400c9bb6d59b6ab51e0a5b65ef07a1b4"} Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.243358 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.385999 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-fernet-keys\") pod \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.386044 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-credential-keys\") pod \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.386064 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vd728\" (UniqueName: \"kubernetes.io/projected/ba8bde50-661d-47cd-baf0-d1f137fe82ba-kube-api-access-vd728\") pod \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.386087 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-scripts\") pod \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.386118 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-config-data\") pod \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\" (UID: \"ba8bde50-661d-47cd-baf0-d1f137fe82ba\") " Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.391645 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "ba8bde50-661d-47cd-baf0-d1f137fe82ba" (UID: "ba8bde50-661d-47cd-baf0-d1f137fe82ba"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.391700 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ba8bde50-661d-47cd-baf0-d1f137fe82ba-kube-api-access-vd728" (OuterVolumeSpecName: "kube-api-access-vd728") pod "ba8bde50-661d-47cd-baf0-d1f137fe82ba" (UID: "ba8bde50-661d-47cd-baf0-d1f137fe82ba"). InnerVolumeSpecName "kube-api-access-vd728". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.391712 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-scripts" (OuterVolumeSpecName: "scripts") pod "ba8bde50-661d-47cd-baf0-d1f137fe82ba" (UID: "ba8bde50-661d-47cd-baf0-d1f137fe82ba"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.393087 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "ba8bde50-661d-47cd-baf0-d1f137fe82ba" (UID: "ba8bde50-661d-47cd-baf0-d1f137fe82ba"). InnerVolumeSpecName "credential-keys". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.409438 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-config-data" (OuterVolumeSpecName: "config-data") pod "ba8bde50-661d-47cd-baf0-d1f137fe82ba" (UID: "ba8bde50-661d-47cd-baf0-d1f137fe82ba"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.487305 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.487339 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-credential-keys\") on node \"crc\" DevicePath \"\"" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.487351 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vd728\" (UniqueName: \"kubernetes.io/projected/ba8bde50-661d-47cd-baf0-d1f137fe82ba-kube-api-access-vd728\") on node \"crc\" DevicePath \"\"" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.487359 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.487367 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/ba8bde50-661d-47cd-baf0-d1f137fe82ba-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.992721 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" event={"ID":"ba8bde50-661d-47cd-baf0-d1f137fe82ba","Type":"ContainerDied","Data":"50ff1b21c3493837239680d65a6b9856d1b82a92444a6cc9700e4db9a79e6c4b"} Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.992785 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="50ff1b21c3493837239680d65a6b9856d1b82a92444a6cc9700e4db9a79e6c4b" Nov 25 10:56:58 crc kubenswrapper[4702]: I1125 10:56:58.992808 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-bootstrap-56m29" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.062756 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-846fc69944-scqjl"] Nov 25 10:56:59 crc kubenswrapper[4702]: E1125 10:56:59.063093 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ba8bde50-661d-47cd-baf0-d1f137fe82ba" containerName="keystone-bootstrap" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.063114 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="ba8bde50-661d-47cd-baf0-d1f137fe82ba" containerName="keystone-bootstrap" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.063244 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="ba8bde50-661d-47cd-baf0-d1f137fe82ba" containerName="keystone-bootstrap" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.063770 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.066184 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.066584 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-keystone-dockercfg-8w8n8" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.066641 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-config-data" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.071217 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-846fc69944-scqjl"] Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.072510 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"keystone-scripts" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.195094 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-config-data\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.195158 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9mrb5\" (UniqueName: \"kubernetes.io/projected/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-kube-api-access-9mrb5\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.195198 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-fernet-keys\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.195221 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-scripts\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.195240 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-credential-keys\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.296873 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-config-data\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.296967 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9mrb5\" 
(UniqueName: \"kubernetes.io/projected/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-kube-api-access-9mrb5\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.297004 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-fernet-keys\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.297032 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-scripts\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.297050 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-credential-keys\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.304522 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-scripts\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.304649 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-fernet-keys\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.305448 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-config-data\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.311465 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-credential-keys\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.322446 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9mrb5\" (UniqueName: \"kubernetes.io/projected/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-kube-api-access-9mrb5\") pod \"keystone-846fc69944-scqjl\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") " pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.387038 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:56:59 crc kubenswrapper[4702]: I1125 10:56:59.710633 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-846fc69944-scqjl"] Nov 25 10:57:00 crc kubenswrapper[4702]: I1125 10:57:00.001605 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" event={"ID":"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5","Type":"ContainerStarted","Data":"b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd"} Nov 25 10:57:00 crc kubenswrapper[4702]: I1125 10:57:00.002002 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:57:00 crc kubenswrapper[4702]: I1125 10:57:00.002019 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" event={"ID":"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5","Type":"ContainerStarted","Data":"a210820c8717da1244b23e315f74ef4e060969e80b899c2f8ed5c6c864735ff3"} Nov 25 10:57:00 crc kubenswrapper[4702]: I1125 10:57:00.022816 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" podStartSLOduration=1.022795144 podStartE2EDuration="1.022795144s" podCreationTimestamp="2025-11-25 10:56:59 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 10:57:00.020498489 +0000 UTC m=+1517.387094198" watchObservedRunningTime="2025-11-25 10:57:00.022795144 +0000 UTC m=+1517.389390833" Nov 25 10:57:30 crc kubenswrapper[4702]: I1125 10:57:30.920064 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.290513 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/openstackclient"] Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.291959 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.294274 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"openstack-config-secret" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.294383 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"keystone-kuttl-tests"/"openstack-config" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.294741 4702 reflector.go:368] Caches populated for *v1.Secret from object-"keystone-kuttl-tests"/"default-dockercfg-t9mz2" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.297497 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstackclient"] Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.450610 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config-secret\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.450708 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wfkj9\" (UniqueName: \"kubernetes.io/projected/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-kube-api-access-wfkj9\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.450752 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.552538 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config-secret\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.552663 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wfkj9\" (UniqueName: \"kubernetes.io/projected/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-kube-api-access-wfkj9\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.552713 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.554064 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 
10:57:32.562722 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config-secret\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.572682 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wfkj9\" (UniqueName: \"kubernetes.io/projected/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-kube-api-access-wfkj9\") pod \"openstackclient\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.609619 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstackclient" Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.801230 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/openstackclient"] Nov 25 10:57:32 crc kubenswrapper[4702]: I1125 10:57:32.815116 4702 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 10:57:33 crc kubenswrapper[4702]: I1125 10:57:33.231452 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstackclient" event={"ID":"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260","Type":"ContainerStarted","Data":"f88028eb8de27d78ead98a9cd6a62919bad00a882f3e7c9aa1370d2a702df591"} Nov 25 10:57:38 crc kubenswrapper[4702]: I1125 10:57:38.895964 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-xnr94"] Nov 25 10:57:38 crc kubenswrapper[4702]: I1125 10:57:38.897785 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:38 crc kubenswrapper[4702]: I1125 10:57:38.899154 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xnr94"] Nov 25 10:57:38 crc kubenswrapper[4702]: I1125 10:57:38.983339 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dcxlb\" (UniqueName: \"kubernetes.io/projected/4bc37436-92ed-4c1d-85e6-13dc74eefec8-kube-api-access-dcxlb\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:38 crc kubenswrapper[4702]: I1125 10:57:38.983575 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-utilities\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:38 crc kubenswrapper[4702]: I1125 10:57:38.983689 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-catalog-content\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:39 crc kubenswrapper[4702]: I1125 10:57:39.085057 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dcxlb\" (UniqueName: \"kubernetes.io/projected/4bc37436-92ed-4c1d-85e6-13dc74eefec8-kube-api-access-dcxlb\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:39 crc kubenswrapper[4702]: I1125 10:57:39.085616 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-utilities\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:39 crc kubenswrapper[4702]: I1125 10:57:39.086173 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-catalog-content\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:39 crc kubenswrapper[4702]: I1125 10:57:39.086134 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-utilities\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:39 crc kubenswrapper[4702]: I1125 10:57:39.086498 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-catalog-content\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:39 crc kubenswrapper[4702]: I1125 10:57:39.118362 4702 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-dcxlb\" (UniqueName: \"kubernetes.io/projected/4bc37436-92ed-4c1d-85e6-13dc74eefec8-kube-api-access-dcxlb\") pod \"community-operators-xnr94\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:39 crc kubenswrapper[4702]: I1125 10:57:39.221663 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:40 crc kubenswrapper[4702]: I1125 10:57:40.866848 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-xnr94"] Nov 25 10:57:40 crc kubenswrapper[4702]: W1125 10:57:40.870645 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bc37436_92ed_4c1d_85e6_13dc74eefec8.slice/crio-fdc0d353b6d64706d744566e47616dd7a65836d4bc0b3bcc2328895a7810ee57 WatchSource:0}: Error finding container fdc0d353b6d64706d744566e47616dd7a65836d4bc0b3bcc2328895a7810ee57: Status 404 returned error can't find the container with id fdc0d353b6d64706d744566e47616dd7a65836d4bc0b3bcc2328895a7810ee57 Nov 25 10:57:41 crc kubenswrapper[4702]: I1125 10:57:41.309136 4702 generic.go:334] "Generic (PLEG): container finished" podID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerID="310b9656e061c905fb0e071a9465540b75249947ba047a7f378374362c43fd5b" exitCode=0 Nov 25 10:57:41 crc kubenswrapper[4702]: I1125 10:57:41.309338 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xnr94" event={"ID":"4bc37436-92ed-4c1d-85e6-13dc74eefec8","Type":"ContainerDied","Data":"310b9656e061c905fb0e071a9465540b75249947ba047a7f378374362c43fd5b"} Nov 25 10:57:41 crc kubenswrapper[4702]: I1125 10:57:41.309515 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xnr94" event={"ID":"4bc37436-92ed-4c1d-85e6-13dc74eefec8","Type":"ContainerStarted","Data":"fdc0d353b6d64706d744566e47616dd7a65836d4bc0b3bcc2328895a7810ee57"} Nov 25 10:57:41 crc kubenswrapper[4702]: I1125 10:57:41.311547 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstackclient" event={"ID":"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260","Type":"ContainerStarted","Data":"5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d"} Nov 25 10:57:41 crc kubenswrapper[4702]: I1125 10:57:41.350879 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/openstackclient" podStartSLOduration=1.6811355780000001 podStartE2EDuration="9.350857444s" podCreationTimestamp="2025-11-25 10:57:32 +0000 UTC" firstStartedPulling="2025-11-25 10:57:32.814907494 +0000 UTC m=+1550.181503183" lastFinishedPulling="2025-11-25 10:57:40.48462935 +0000 UTC m=+1557.851225049" observedRunningTime="2025-11-25 10:57:41.346495291 +0000 UTC m=+1558.713091020" watchObservedRunningTime="2025-11-25 10:57:41.350857444 +0000 UTC m=+1558.717453173" Nov 25 10:57:42 crc kubenswrapper[4702]: I1125 10:57:42.319735 4702 generic.go:334] "Generic (PLEG): container finished" podID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerID="2c5555e05a6dc7a130d1007b468427a128e61ad23bbc638ea8769f65c572d703" exitCode=0 Nov 25 10:57:42 crc kubenswrapper[4702]: I1125 10:57:42.319824 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xnr94" 
event={"ID":"4bc37436-92ed-4c1d-85e6-13dc74eefec8","Type":"ContainerDied","Data":"2c5555e05a6dc7a130d1007b468427a128e61ad23bbc638ea8769f65c572d703"} Nov 25 10:57:43 crc kubenswrapper[4702]: I1125 10:57:43.590384 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:57:43 crc kubenswrapper[4702]: I1125 10:57:43.591746 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:57:44 crc kubenswrapper[4702]: I1125 10:57:44.335528 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xnr94" event={"ID":"4bc37436-92ed-4c1d-85e6-13dc74eefec8","Type":"ContainerStarted","Data":"212b8cc2023a4537a7da0149096282ef91277896cdec4e9ad3d69ee319f1b3dd"} Nov 25 10:57:44 crc kubenswrapper[4702]: I1125 10:57:44.354763 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-xnr94" podStartSLOduration=4.106515583 podStartE2EDuration="6.354742006s" podCreationTimestamp="2025-11-25 10:57:38 +0000 UTC" firstStartedPulling="2025-11-25 10:57:41.310715396 +0000 UTC m=+1558.677311085" lastFinishedPulling="2025-11-25 10:57:43.558941809 +0000 UTC m=+1560.925537508" observedRunningTime="2025-11-25 10:57:44.350325132 +0000 UTC m=+1561.716920831" watchObservedRunningTime="2025-11-25 10:57:44.354742006 +0000 UTC m=+1561.721337695" Nov 25 10:57:47 crc kubenswrapper[4702]: I1125 10:57:47.993535 4702 scope.go:117] "RemoveContainer" containerID="f67620f39482a658358a642b5d376a90c33a2d0b31316efbcff085ad3bfa5aab" Nov 25 10:57:48 crc kubenswrapper[4702]: I1125 10:57:48.015426 4702 scope.go:117] "RemoveContainer" containerID="5ff68c154a540a829475eb3c4bf0ad2f334b79bf3d216f2cf9d1e35c4d2518d7" Nov 25 10:57:49 crc kubenswrapper[4702]: I1125 10:57:49.222449 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:49 crc kubenswrapper[4702]: I1125 10:57:49.222511 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:49 crc kubenswrapper[4702]: I1125 10:57:49.260864 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:49 crc kubenswrapper[4702]: I1125 10:57:49.431137 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:49 crc kubenswrapper[4702]: I1125 10:57:49.491212 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xnr94"] Nov 25 10:57:51 crc kubenswrapper[4702]: I1125 10:57:51.421987 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-xnr94" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerName="registry-server" containerID="cri-o://212b8cc2023a4537a7da0149096282ef91277896cdec4e9ad3d69ee319f1b3dd" gracePeriod=2 Nov 25 10:57:52 crc 
kubenswrapper[4702]: I1125 10:57:52.430642 4702 generic.go:334] "Generic (PLEG): container finished" podID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerID="212b8cc2023a4537a7da0149096282ef91277896cdec4e9ad3d69ee319f1b3dd" exitCode=0 Nov 25 10:57:52 crc kubenswrapper[4702]: I1125 10:57:52.430715 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xnr94" event={"ID":"4bc37436-92ed-4c1d-85e6-13dc74eefec8","Type":"ContainerDied","Data":"212b8cc2023a4537a7da0149096282ef91277896cdec4e9ad3d69ee319f1b3dd"} Nov 25 10:57:52 crc kubenswrapper[4702]: I1125 10:57:52.909814 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:52 crc kubenswrapper[4702]: I1125 10:57:52.977916 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dcxlb\" (UniqueName: \"kubernetes.io/projected/4bc37436-92ed-4c1d-85e6-13dc74eefec8-kube-api-access-dcxlb\") pod \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " Nov 25 10:57:52 crc kubenswrapper[4702]: I1125 10:57:52.978011 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-utilities\") pod \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " Nov 25 10:57:52 crc kubenswrapper[4702]: I1125 10:57:52.978043 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-catalog-content\") pod \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\" (UID: \"4bc37436-92ed-4c1d-85e6-13dc74eefec8\") " Nov 25 10:57:52 crc kubenswrapper[4702]: I1125 10:57:52.979084 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-utilities" (OuterVolumeSpecName: "utilities") pod "4bc37436-92ed-4c1d-85e6-13dc74eefec8" (UID: "4bc37436-92ed-4c1d-85e6-13dc74eefec8"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:57:52 crc kubenswrapper[4702]: I1125 10:57:52.982952 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bc37436-92ed-4c1d-85e6-13dc74eefec8-kube-api-access-dcxlb" (OuterVolumeSpecName: "kube-api-access-dcxlb") pod "4bc37436-92ed-4c1d-85e6-13dc74eefec8" (UID: "4bc37436-92ed-4c1d-85e6-13dc74eefec8"). InnerVolumeSpecName "kube-api-access-dcxlb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.080205 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.080478 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dcxlb\" (UniqueName: \"kubernetes.io/projected/4bc37436-92ed-4c1d-85e6-13dc74eefec8-kube-api-access-dcxlb\") on node \"crc\" DevicePath \"\"" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.445924 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-xnr94" event={"ID":"4bc37436-92ed-4c1d-85e6-13dc74eefec8","Type":"ContainerDied","Data":"fdc0d353b6d64706d744566e47616dd7a65836d4bc0b3bcc2328895a7810ee57"} Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.445988 4702 scope.go:117] "RemoveContainer" containerID="212b8cc2023a4537a7da0149096282ef91277896cdec4e9ad3d69ee319f1b3dd" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.449081 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-xnr94" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.470168 4702 scope.go:117] "RemoveContainer" containerID="2c5555e05a6dc7a130d1007b468427a128e61ad23bbc638ea8769f65c572d703" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.487197 4702 scope.go:117] "RemoveContainer" containerID="310b9656e061c905fb0e071a9465540b75249947ba047a7f378374362c43fd5b" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.647302 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4bc37436-92ed-4c1d-85e6-13dc74eefec8" (UID: "4bc37436-92ed-4c1d-85e6-13dc74eefec8"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.692174 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4bc37436-92ed-4c1d-85e6-13dc74eefec8-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.792362 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-xnr94"] Nov 25 10:57:53 crc kubenswrapper[4702]: I1125 10:57:53.800682 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-xnr94"] Nov 25 10:57:53 crc kubenswrapper[4702]: E1125 10:57:53.882012 4702 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bc37436_92ed_4c1d_85e6_13dc74eefec8.slice/crio-fdc0d353b6d64706d744566e47616dd7a65836d4bc0b3bcc2328895a7810ee57\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4bc37436_92ed_4c1d_85e6_13dc74eefec8.slice\": RecentStats: unable to find data in memory cache]" Nov 25 10:57:55 crc kubenswrapper[4702]: I1125 10:57:55.410664 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" path="/var/lib/kubelet/pods/4bc37436-92ed-4c1d-85e6-13dc74eefec8/volumes" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.130410 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-ffhqx"] Nov 25 10:58:04 crc kubenswrapper[4702]: E1125 10:58:04.131187 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerName="registry-server" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.131203 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerName="registry-server" Nov 25 10:58:04 crc kubenswrapper[4702]: E1125 10:58:04.131228 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerName="extract-utilities" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.131234 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerName="extract-utilities" Nov 25 10:58:04 crc kubenswrapper[4702]: E1125 10:58:04.131250 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerName="extract-content" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.131255 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerName="extract-content" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.131375 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="4bc37436-92ed-4c1d-85e6-13dc74eefec8" containerName="registry-server" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.132233 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.137993 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffhqx"] Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.239796 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-utilities\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.240187 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g695q\" (UniqueName: \"kubernetes.io/projected/2afeac0c-db94-4e72-875a-32e348c84535-kube-api-access-g695q\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.240257 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-catalog-content\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.341646 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-utilities\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.341738 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g695q\" (UniqueName: \"kubernetes.io/projected/2afeac0c-db94-4e72-875a-32e348c84535-kube-api-access-g695q\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.341826 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-catalog-content\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.342317 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-catalog-content\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.342563 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-utilities\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.368057 4702 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-g695q\" (UniqueName: \"kubernetes.io/projected/2afeac0c-db94-4e72-875a-32e348c84535-kube-api-access-g695q\") pod \"redhat-marketplace-ffhqx\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.452265 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:04 crc kubenswrapper[4702]: I1125 10:58:04.683451 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffhqx"] Nov 25 10:58:05 crc kubenswrapper[4702]: I1125 10:58:05.530525 4702 generic.go:334] "Generic (PLEG): container finished" podID="2afeac0c-db94-4e72-875a-32e348c84535" containerID="ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d" exitCode=0 Nov 25 10:58:05 crc kubenswrapper[4702]: I1125 10:58:05.530609 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffhqx" event={"ID":"2afeac0c-db94-4e72-875a-32e348c84535","Type":"ContainerDied","Data":"ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d"} Nov 25 10:58:05 crc kubenswrapper[4702]: I1125 10:58:05.531029 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffhqx" event={"ID":"2afeac0c-db94-4e72-875a-32e348c84535","Type":"ContainerStarted","Data":"af50baf47aa464052fbc76850087dbfce54651483e442174a4d56ca461c23d09"} Nov 25 10:58:06 crc kubenswrapper[4702]: I1125 10:58:06.541729 4702 generic.go:334] "Generic (PLEG): container finished" podID="2afeac0c-db94-4e72-875a-32e348c84535" containerID="8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8" exitCode=0 Nov 25 10:58:06 crc kubenswrapper[4702]: I1125 10:58:06.541774 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffhqx" event={"ID":"2afeac0c-db94-4e72-875a-32e348c84535","Type":"ContainerDied","Data":"8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8"} Nov 25 10:58:07 crc kubenswrapper[4702]: I1125 10:58:07.551006 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffhqx" event={"ID":"2afeac0c-db94-4e72-875a-32e348c84535","Type":"ContainerStarted","Data":"71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb"} Nov 25 10:58:07 crc kubenswrapper[4702]: I1125 10:58:07.572812 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-ffhqx" podStartSLOduration=2.165995025 podStartE2EDuration="3.572791988s" podCreationTimestamp="2025-11-25 10:58:04 +0000 UTC" firstStartedPulling="2025-11-25 10:58:05.534223574 +0000 UTC m=+1582.900819263" lastFinishedPulling="2025-11-25 10:58:06.941020537 +0000 UTC m=+1584.307616226" observedRunningTime="2025-11-25 10:58:07.568256041 +0000 UTC m=+1584.934851760" watchObservedRunningTime="2025-11-25 10:58:07.572791988 +0000 UTC m=+1584.939387697" Nov 25 10:58:13 crc kubenswrapper[4702]: I1125 10:58:13.590621 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:58:13 crc kubenswrapper[4702]: I1125 10:58:13.591281 4702 prober.go:107] "Probe failed" probeType="Liveness" 
pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:58:14 crc kubenswrapper[4702]: I1125 10:58:14.452512 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:14 crc kubenswrapper[4702]: I1125 10:58:14.452893 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:14 crc kubenswrapper[4702]: I1125 10:58:14.504125 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:14 crc kubenswrapper[4702]: I1125 10:58:14.639880 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:14 crc kubenswrapper[4702]: I1125 10:58:14.738469 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffhqx"] Nov 25 10:58:16 crc kubenswrapper[4702]: I1125 10:58:16.611240 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-ffhqx" podUID="2afeac0c-db94-4e72-875a-32e348c84535" containerName="registry-server" containerID="cri-o://71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb" gracePeriod=2 Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.812851 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.955047 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-catalog-content\") pod \"2afeac0c-db94-4e72-875a-32e348c84535\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.955183 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g695q\" (UniqueName: \"kubernetes.io/projected/2afeac0c-db94-4e72-875a-32e348c84535-kube-api-access-g695q\") pod \"2afeac0c-db94-4e72-875a-32e348c84535\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.955255 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-utilities\") pod \"2afeac0c-db94-4e72-875a-32e348c84535\" (UID: \"2afeac0c-db94-4e72-875a-32e348c84535\") " Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.956339 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-utilities" (OuterVolumeSpecName: "utilities") pod "2afeac0c-db94-4e72-875a-32e348c84535" (UID: "2afeac0c-db94-4e72-875a-32e348c84535"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.965848 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2afeac0c-db94-4e72-875a-32e348c84535-kube-api-access-g695q" (OuterVolumeSpecName: "kube-api-access-g695q") pod "2afeac0c-db94-4e72-875a-32e348c84535" (UID: "2afeac0c-db94-4e72-875a-32e348c84535"). InnerVolumeSpecName "kube-api-access-g695q". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.975083 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "2afeac0c-db94-4e72-875a-32e348c84535" (UID: "2afeac0c-db94-4e72-875a-32e348c84535"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.987074 4702 generic.go:334] "Generic (PLEG): container finished" podID="2afeac0c-db94-4e72-875a-32e348c84535" containerID="71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb" exitCode=0 Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.987121 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffhqx" event={"ID":"2afeac0c-db94-4e72-875a-32e348c84535","Type":"ContainerDied","Data":"71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb"} Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.987152 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-ffhqx" event={"ID":"2afeac0c-db94-4e72-875a-32e348c84535","Type":"ContainerDied","Data":"af50baf47aa464052fbc76850087dbfce54651483e442174a4d56ca461c23d09"} Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.987174 4702 scope.go:117] "RemoveContainer" containerID="71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb" Nov 25 10:58:18 crc kubenswrapper[4702]: I1125 10:58:18.987181 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-ffhqx" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.008475 4702 scope.go:117] "RemoveContainer" containerID="8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.035554 4702 scope.go:117] "RemoveContainer" containerID="ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.037770 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffhqx"] Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.050976 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-ffhqx"] Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.053469 4702 scope.go:117] "RemoveContainer" containerID="71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb" Nov 25 10:58:19 crc kubenswrapper[4702]: E1125 10:58:19.054078 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb\": container with ID starting with 71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb not found: ID does not exist" containerID="71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.054213 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb"} err="failed to get container status \"71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb\": rpc error: code = NotFound desc = could not find container \"71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb\": container with ID starting with 71e7646e57271313dc2f7913307a5f48d031626b024e37b958cdda249c95baeb not found: ID does not exist" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.054356 4702 scope.go:117] "RemoveContainer" containerID="8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8" Nov 25 10:58:19 crc kubenswrapper[4702]: E1125 10:58:19.054912 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8\": container with ID starting with 8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8 not found: ID does not exist" containerID="8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.054957 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8"} err="failed to get container status \"8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8\": rpc error: code = NotFound desc = could not find container \"8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8\": container with ID starting with 8191e8d84478f88ec769f9dce8480516b89bbe677550e84156a410bf3c1520b8 not found: ID does not exist" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.054990 4702 scope.go:117] "RemoveContainer" containerID="ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d" Nov 25 10:58:19 crc kubenswrapper[4702]: E1125 10:58:19.055207 4702 log.go:32] "ContainerStatus from runtime service 
failed" err="rpc error: code = NotFound desc = could not find container \"ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d\": container with ID starting with ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d not found: ID does not exist" containerID="ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.055232 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d"} err="failed to get container status \"ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d\": rpc error: code = NotFound desc = could not find container \"ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d\": container with ID starting with ee97bcf27e407b1a82fcf5eb0c784c2efb1ff7f70160241c5d0b8860d008384d not found: ID does not exist" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.057093 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g695q\" (UniqueName: \"kubernetes.io/projected/2afeac0c-db94-4e72-875a-32e348c84535-kube-api-access-g695q\") on node \"crc\" DevicePath \"\"" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.057120 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.057129 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/2afeac0c-db94-4e72-875a-32e348c84535-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:58:19 crc kubenswrapper[4702]: I1125 10:58:19.412358 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2afeac0c-db94-4e72-875a-32e348c84535" path="/var/lib/kubelet/pods/2afeac0c-db94-4e72-875a-32e348c84535/volumes" Nov 25 10:58:43 crc kubenswrapper[4702]: I1125 10:58:43.590605 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 10:58:43 crc kubenswrapper[4702]: I1125 10:58:43.591447 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 10:58:43 crc kubenswrapper[4702]: I1125 10:58:43.591501 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 10:58:43 crc kubenswrapper[4702]: I1125 10:58:43.592154 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 10:58:43 crc kubenswrapper[4702]: I1125 10:58:43.592242 4702 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" gracePeriod=600 Nov 25 10:58:43 crc kubenswrapper[4702]: E1125 10:58:43.760251 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 10:58:44 crc kubenswrapper[4702]: I1125 10:58:44.186566 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" exitCode=0 Nov 25 10:58:44 crc kubenswrapper[4702]: I1125 10:58:44.186672 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659"} Nov 25 10:58:44 crc kubenswrapper[4702]: I1125 10:58:44.186966 4702 scope.go:117] "RemoveContainer" containerID="272d347b9d4642895dedc11ba1aec7becf7520474f3145b69f14b52ec045a606" Nov 25 10:58:44 crc kubenswrapper[4702]: I1125 10:58:44.187656 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 10:58:44 crc kubenswrapper[4702]: E1125 10:58:44.188489 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 10:58:48 crc kubenswrapper[4702]: I1125 10:58:48.079568 4702 scope.go:117] "RemoveContainer" containerID="7e1990770a2084e80800f57afb9fa8259aeff7c080de121e269a147b232930d2" Nov 25 10:58:48 crc kubenswrapper[4702]: I1125 10:58:48.103641 4702 scope.go:117] "RemoveContainer" containerID="1616714aea9f92174fb037e2310c033518f64aa92b0beed6a29bc963c89a81e5" Nov 25 10:58:48 crc kubenswrapper[4702]: I1125 10:58:48.127714 4702 scope.go:117] "RemoveContainer" containerID="1145f142980a50a9decfeef6a1c8d3089b6091c4973debffcd495e71c2f73ec1" Nov 25 10:58:54 crc kubenswrapper[4702]: I1125 10:58:54.402650 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 10:58:54 crc kubenswrapper[4702]: E1125 10:58:54.403332 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 10:59:04 crc kubenswrapper[4702]: I1125 10:59:04.998448 4702 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["openshift-marketplace/certified-operators-46gbv"] Nov 25 10:59:05 crc kubenswrapper[4702]: E1125 10:59:04.999429 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2afeac0c-db94-4e72-875a-32e348c84535" containerName="extract-content" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:04.999446 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2afeac0c-db94-4e72-875a-32e348c84535" containerName="extract-content" Nov 25 10:59:05 crc kubenswrapper[4702]: E1125 10:59:04.999461 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2afeac0c-db94-4e72-875a-32e348c84535" containerName="extract-utilities" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:04.999469 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2afeac0c-db94-4e72-875a-32e348c84535" containerName="extract-utilities" Nov 25 10:59:05 crc kubenswrapper[4702]: E1125 10:59:04.999491 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2afeac0c-db94-4e72-875a-32e348c84535" containerName="registry-server" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:04.999498 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="2afeac0c-db94-4e72-875a-32e348c84535" containerName="registry-server" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:04.999637 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="2afeac0c-db94-4e72-875a-32e348c84535" containerName="registry-server" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.000707 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.005309 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-46gbv"] Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.036244 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vvfwb\" (UniqueName: \"kubernetes.io/projected/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-kube-api-access-vvfwb\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.036421 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-catalog-content\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.036458 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-utilities\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.138228 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-catalog-content\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.138280 4702 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-utilities\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.138329 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vvfwb\" (UniqueName: \"kubernetes.io/projected/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-kube-api-access-vvfwb\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.139053 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-utilities\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.139220 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-catalog-content\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.163495 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vvfwb\" (UniqueName: \"kubernetes.io/projected/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-kube-api-access-vvfwb\") pod \"certified-operators-46gbv\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.328549 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:05 crc kubenswrapper[4702]: I1125 10:59:05.821412 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-46gbv"] Nov 25 10:59:06 crc kubenswrapper[4702]: I1125 10:59:06.366892 4702 generic.go:334] "Generic (PLEG): container finished" podID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerID="a5170d6c1418b5e3c5cf73ca0b2fcc737144054bc29caebf9a80cd0ecae1ab17" exitCode=0 Nov 25 10:59:06 crc kubenswrapper[4702]: I1125 10:59:06.366975 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46gbv" event={"ID":"e95f40d5-8529-4b6c-bd45-acbcc16b62ec","Type":"ContainerDied","Data":"a5170d6c1418b5e3c5cf73ca0b2fcc737144054bc29caebf9a80cd0ecae1ab17"} Nov 25 10:59:06 crc kubenswrapper[4702]: I1125 10:59:06.367007 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46gbv" event={"ID":"e95f40d5-8529-4b6c-bd45-acbcc16b62ec","Type":"ContainerStarted","Data":"defd861ef8f86b23b0bb5f539d8caae182c5ef02843afbe54025d030a6d90b79"} Nov 25 10:59:07 crc kubenswrapper[4702]: I1125 10:59:07.403017 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 10:59:07 crc kubenswrapper[4702]: E1125 10:59:07.403851 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 10:59:08 crc kubenswrapper[4702]: I1125 10:59:08.381768 4702 generic.go:334] "Generic (PLEG): container finished" podID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerID="b2b79fdf0d396d994a132c288541ba1dd34228d0ef169abecacd3ff0b94ff7ec" exitCode=0 Nov 25 10:59:08 crc kubenswrapper[4702]: I1125 10:59:08.381875 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46gbv" event={"ID":"e95f40d5-8529-4b6c-bd45-acbcc16b62ec","Type":"ContainerDied","Data":"b2b79fdf0d396d994a132c288541ba1dd34228d0ef169abecacd3ff0b94ff7ec"} Nov 25 10:59:09 crc kubenswrapper[4702]: I1125 10:59:09.392785 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46gbv" event={"ID":"e95f40d5-8529-4b6c-bd45-acbcc16b62ec","Type":"ContainerStarted","Data":"f95cf1d49268af474e0981926f0959019f815fa58471882cfff1645d04ffeb86"} Nov 25 10:59:09 crc kubenswrapper[4702]: I1125 10:59:09.413581 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-46gbv" podStartSLOduration=2.946538626 podStartE2EDuration="5.413561891s" podCreationTimestamp="2025-11-25 10:59:04 +0000 UTC" firstStartedPulling="2025-11-25 10:59:06.368787832 +0000 UTC m=+1643.735383521" lastFinishedPulling="2025-11-25 10:59:08.835811097 +0000 UTC m=+1646.202406786" observedRunningTime="2025-11-25 10:59:09.413341535 +0000 UTC m=+1646.779937254" watchObservedRunningTime="2025-11-25 10:59:09.413561891 +0000 UTC m=+1646.780157580" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.368458 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-lj47l"] Nov 25 10:59:11 crc 
kubenswrapper[4702]: I1125 10:59:11.370153 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.397395 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lj47l"] Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.433646 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-catalog-content\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.433717 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-v6mw5\" (UniqueName: \"kubernetes.io/projected/35019574-e335-4c69-9a66-1b3dbbdffccd-kube-api-access-v6mw5\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.433754 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-utilities\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.535234 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-catalog-content\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.535312 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-v6mw5\" (UniqueName: \"kubernetes.io/projected/35019574-e335-4c69-9a66-1b3dbbdffccd-kube-api-access-v6mw5\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.535346 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-utilities\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.535773 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-catalog-content\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.535808 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-utilities\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 
10:59:11.567743 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-v6mw5\" (UniqueName: \"kubernetes.io/projected/35019574-e335-4c69-9a66-1b3dbbdffccd-kube-api-access-v6mw5\") pod \"redhat-operators-lj47l\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.703772 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:11 crc kubenswrapper[4702]: I1125 10:59:11.949743 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-lj47l"] Nov 25 10:59:11 crc kubenswrapper[4702]: W1125 10:59:11.953176 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod35019574_e335_4c69_9a66_1b3dbbdffccd.slice/crio-363f4d048cef22833434d26d4b52a8a321e1c7aa34e0f3326a607469868803d5 WatchSource:0}: Error finding container 363f4d048cef22833434d26d4b52a8a321e1c7aa34e0f3326a607469868803d5: Status 404 returned error can't find the container with id 363f4d048cef22833434d26d4b52a8a321e1c7aa34e0f3326a607469868803d5 Nov 25 10:59:12 crc kubenswrapper[4702]: I1125 10:59:12.417009 4702 generic.go:334] "Generic (PLEG): container finished" podID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerID="6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695" exitCode=0 Nov 25 10:59:12 crc kubenswrapper[4702]: I1125 10:59:12.417060 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lj47l" event={"ID":"35019574-e335-4c69-9a66-1b3dbbdffccd","Type":"ContainerDied","Data":"6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695"} Nov 25 10:59:12 crc kubenswrapper[4702]: I1125 10:59:12.417090 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lj47l" event={"ID":"35019574-e335-4c69-9a66-1b3dbbdffccd","Type":"ContainerStarted","Data":"363f4d048cef22833434d26d4b52a8a321e1c7aa34e0f3326a607469868803d5"} Nov 25 10:59:13 crc kubenswrapper[4702]: I1125 10:59:13.426582 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lj47l" event={"ID":"35019574-e335-4c69-9a66-1b3dbbdffccd","Type":"ContainerStarted","Data":"0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0"} Nov 25 10:59:14 crc kubenswrapper[4702]: I1125 10:59:14.435989 4702 generic.go:334] "Generic (PLEG): container finished" podID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerID="0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0" exitCode=0 Nov 25 10:59:14 crc kubenswrapper[4702]: I1125 10:59:14.436046 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lj47l" event={"ID":"35019574-e335-4c69-9a66-1b3dbbdffccd","Type":"ContainerDied","Data":"0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0"} Nov 25 10:59:15 crc kubenswrapper[4702]: I1125 10:59:15.329612 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:15 crc kubenswrapper[4702]: I1125 10:59:15.331282 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:15 crc kubenswrapper[4702]: I1125 10:59:15.373637 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:15 crc kubenswrapper[4702]: I1125 10:59:15.448613 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lj47l" event={"ID":"35019574-e335-4c69-9a66-1b3dbbdffccd","Type":"ContainerStarted","Data":"eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670"} Nov 25 10:59:15 crc kubenswrapper[4702]: I1125 10:59:15.464795 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-lj47l" podStartSLOduration=2.070634545 podStartE2EDuration="4.464776725s" podCreationTimestamp="2025-11-25 10:59:11 +0000 UTC" firstStartedPulling="2025-11-25 10:59:12.418243915 +0000 UTC m=+1649.784839594" lastFinishedPulling="2025-11-25 10:59:14.812386085 +0000 UTC m=+1652.178981774" observedRunningTime="2025-11-25 10:59:15.463317494 +0000 UTC m=+1652.829913183" watchObservedRunningTime="2025-11-25 10:59:15.464776725 +0000 UTC m=+1652.831372414" Nov 25 10:59:15 crc kubenswrapper[4702]: I1125 10:59:15.497593 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:17 crc kubenswrapper[4702]: I1125 10:59:17.760076 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-46gbv"] Nov 25 10:59:18 crc kubenswrapper[4702]: I1125 10:59:18.469289 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-46gbv" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerName="registry-server" containerID="cri-o://f95cf1d49268af474e0981926f0959019f815fa58471882cfff1645d04ffeb86" gracePeriod=2 Nov 25 10:59:19 crc kubenswrapper[4702]: I1125 10:59:19.478693 4702 generic.go:334] "Generic (PLEG): container finished" podID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerID="f95cf1d49268af474e0981926f0959019f815fa58471882cfff1645d04ffeb86" exitCode=0 Nov 25 10:59:19 crc kubenswrapper[4702]: I1125 10:59:19.478780 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46gbv" event={"ID":"e95f40d5-8529-4b6c-bd45-acbcc16b62ec","Type":"ContainerDied","Data":"f95cf1d49268af474e0981926f0959019f815fa58471882cfff1645d04ffeb86"} Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.066008 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.191568 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-catalog-content\") pod \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.191801 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-utilities\") pod \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.191871 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vvfwb\" (UniqueName: \"kubernetes.io/projected/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-kube-api-access-vvfwb\") pod \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\" (UID: \"e95f40d5-8529-4b6c-bd45-acbcc16b62ec\") " Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.192707 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-utilities" (OuterVolumeSpecName: "utilities") pod "e95f40d5-8529-4b6c-bd45-acbcc16b62ec" (UID: "e95f40d5-8529-4b6c-bd45-acbcc16b62ec"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.197274 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-kube-api-access-vvfwb" (OuterVolumeSpecName: "kube-api-access-vvfwb") pod "e95f40d5-8529-4b6c-bd45-acbcc16b62ec" (UID: "e95f40d5-8529-4b6c-bd45-acbcc16b62ec"). InnerVolumeSpecName "kube-api-access-vvfwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.235377 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "e95f40d5-8529-4b6c-bd45-acbcc16b62ec" (UID: "e95f40d5-8529-4b6c-bd45-acbcc16b62ec"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.293833 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.293881 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.293891 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vvfwb\" (UniqueName: \"kubernetes.io/projected/e95f40d5-8529-4b6c-bd45-acbcc16b62ec-kube-api-access-vvfwb\") on node \"crc\" DevicePath \"\"" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.490299 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-46gbv" event={"ID":"e95f40d5-8529-4b6c-bd45-acbcc16b62ec","Type":"ContainerDied","Data":"defd861ef8f86b23b0bb5f539d8caae182c5ef02843afbe54025d030a6d90b79"} Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.490353 4702 scope.go:117] "RemoveContainer" containerID="f95cf1d49268af474e0981926f0959019f815fa58471882cfff1645d04ffeb86" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.490406 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-46gbv" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.514398 4702 scope.go:117] "RemoveContainer" containerID="b2b79fdf0d396d994a132c288541ba1dd34228d0ef169abecacd3ff0b94ff7ec" Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.544311 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-46gbv"] Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.550571 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-46gbv"] Nov 25 10:59:20 crc kubenswrapper[4702]: I1125 10:59:20.550693 4702 scope.go:117] "RemoveContainer" containerID="a5170d6c1418b5e3c5cf73ca0b2fcc737144054bc29caebf9a80cd0ecae1ab17" Nov 25 10:59:21 crc kubenswrapper[4702]: I1125 10:59:21.402488 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 10:59:21 crc kubenswrapper[4702]: E1125 10:59:21.403134 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 10:59:21 crc kubenswrapper[4702]: I1125 10:59:21.412340 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" path="/var/lib/kubelet/pods/e95f40d5-8529-4b6c-bd45-acbcc16b62ec/volumes" Nov 25 10:59:21 crc kubenswrapper[4702]: I1125 10:59:21.703642 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:21 crc kubenswrapper[4702]: I1125 10:59:21.703731 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" 
pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:21 crc kubenswrapper[4702]: I1125 10:59:21.742623 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:22 crc kubenswrapper[4702]: I1125 10:59:22.560627 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:22 crc kubenswrapper[4702]: I1125 10:59:22.959932 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lj47l"] Nov 25 10:59:24 crc kubenswrapper[4702]: I1125 10:59:24.521030 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-lj47l" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerName="registry-server" containerID="cri-o://eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670" gracePeriod=2 Nov 25 10:59:24 crc kubenswrapper[4702]: I1125 10:59:24.903970 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.056592 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-utilities\") pod \"35019574-e335-4c69-9a66-1b3dbbdffccd\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.056647 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-catalog-content\") pod \"35019574-e335-4c69-9a66-1b3dbbdffccd\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.056756 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v6mw5\" (UniqueName: \"kubernetes.io/projected/35019574-e335-4c69-9a66-1b3dbbdffccd-kube-api-access-v6mw5\") pod \"35019574-e335-4c69-9a66-1b3dbbdffccd\" (UID: \"35019574-e335-4c69-9a66-1b3dbbdffccd\") " Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.058077 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-utilities" (OuterVolumeSpecName: "utilities") pod "35019574-e335-4c69-9a66-1b3dbbdffccd" (UID: "35019574-e335-4c69-9a66-1b3dbbdffccd"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.061551 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/35019574-e335-4c69-9a66-1b3dbbdffccd-kube-api-access-v6mw5" (OuterVolumeSpecName: "kube-api-access-v6mw5") pod "35019574-e335-4c69-9a66-1b3dbbdffccd" (UID: "35019574-e335-4c69-9a66-1b3dbbdffccd"). InnerVolumeSpecName "kube-api-access-v6mw5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.159016 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.159736 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v6mw5\" (UniqueName: \"kubernetes.io/projected/35019574-e335-4c69-9a66-1b3dbbdffccd-kube-api-access-v6mw5\") on node \"crc\" DevicePath \"\"" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.161956 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "35019574-e335-4c69-9a66-1b3dbbdffccd" (UID: "35019574-e335-4c69-9a66-1b3dbbdffccd"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.264883 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/35019574-e335-4c69-9a66-1b3dbbdffccd-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.528492 4702 generic.go:334] "Generic (PLEG): container finished" podID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerID="eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670" exitCode=0 Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.528532 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lj47l" event={"ID":"35019574-e335-4c69-9a66-1b3dbbdffccd","Type":"ContainerDied","Data":"eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670"} Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.528539 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-lj47l" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.528555 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-lj47l" event={"ID":"35019574-e335-4c69-9a66-1b3dbbdffccd","Type":"ContainerDied","Data":"363f4d048cef22833434d26d4b52a8a321e1c7aa34e0f3326a607469868803d5"} Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.528570 4702 scope.go:117] "RemoveContainer" containerID="eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.551239 4702 scope.go:117] "RemoveContainer" containerID="0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.552174 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-lj47l"] Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.557847 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-lj47l"] Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.568462 4702 scope.go:117] "RemoveContainer" containerID="6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.593810 4702 scope.go:117] "RemoveContainer" containerID="eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670" Nov 25 10:59:25 crc kubenswrapper[4702]: E1125 10:59:25.594333 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670\": container with ID starting with eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670 not found: ID does not exist" containerID="eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.594381 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670"} err="failed to get container status \"eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670\": rpc error: code = NotFound desc = could not find container \"eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670\": container with ID starting with eb4fcc10c4b979a51996f81f3dc7d32408c653d5a05ce45d57433e0833d2f670 not found: ID does not exist" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.594415 4702 scope.go:117] "RemoveContainer" containerID="0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0" Nov 25 10:59:25 crc kubenswrapper[4702]: E1125 10:59:25.594694 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0\": container with ID starting with 0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0 not found: ID does not exist" containerID="0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.594724 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0"} err="failed to get container status \"0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0\": rpc error: code = NotFound desc = could not find container 
\"0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0\": container with ID starting with 0d16696d66048495a854dba765a404a87be236a3d9e52b57a498b6204d1f78f0 not found: ID does not exist" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.594745 4702 scope.go:117] "RemoveContainer" containerID="6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695" Nov 25 10:59:25 crc kubenswrapper[4702]: E1125 10:59:25.595033 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695\": container with ID starting with 6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695 not found: ID does not exist" containerID="6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695" Nov 25 10:59:25 crc kubenswrapper[4702]: I1125 10:59:25.595068 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695"} err="failed to get container status \"6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695\": rpc error: code = NotFound desc = could not find container \"6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695\": container with ID starting with 6b5c91398ad4198bacadc677ada6cb080c284ebb6aea1c35ad850debf79f1695 not found: ID does not exist" Nov 25 10:59:27 crc kubenswrapper[4702]: I1125 10:59:27.415733 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" path="/var/lib/kubelet/pods/35019574-e335-4c69-9a66-1b3dbbdffccd/volumes" Nov 25 10:59:34 crc kubenswrapper[4702]: I1125 10:59:34.402813 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 10:59:34 crc kubenswrapper[4702]: E1125 10:59:34.404059 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 10:59:46 crc kubenswrapper[4702]: I1125 10:59:46.402053 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 10:59:46 crc kubenswrapper[4702]: E1125 10:59:46.403052 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 10:59:48 crc kubenswrapper[4702]: I1125 10:59:48.244201 4702 scope.go:117] "RemoveContainer" containerID="453e84ed84e16e2db801e37829458dc8a2bf0e5e8acaaaa734dd47c3e759221a" Nov 25 10:59:48 crc kubenswrapper[4702]: I1125 10:59:48.290652 4702 scope.go:117] "RemoveContainer" containerID="97f695ecd3e4a0dcaff75ab259c3fa4134cbbf812adfe5c163b4ea8e86a41780" Nov 25 10:59:48 crc kubenswrapper[4702]: I1125 10:59:48.326932 4702 scope.go:117] "RemoveContainer" 
containerID="bc75a92a5503a3e979a7efb806a25558412a7d8ea74dfb21f616f440706bd82a" Nov 25 10:59:48 crc kubenswrapper[4702]: I1125 10:59:48.351337 4702 scope.go:117] "RemoveContainer" containerID="280c65ba8ebe99528481bd89ba715c42c20e53181145a079e3dc4578b4273289" Nov 25 10:59:59 crc kubenswrapper[4702]: I1125 10:59:59.401795 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 10:59:59 crc kubenswrapper[4702]: E1125 10:59:59.402628 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.164041 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx"] Nov 25 11:00:00 crc kubenswrapper[4702]: E1125 11:00:00.164627 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerName="extract-utilities" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.164646 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerName="extract-utilities" Nov 25 11:00:00 crc kubenswrapper[4702]: E1125 11:00:00.164669 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerName="registry-server" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.164677 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerName="registry-server" Nov 25 11:00:00 crc kubenswrapper[4702]: E1125 11:00:00.164686 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerName="registry-server" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.164694 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerName="registry-server" Nov 25 11:00:00 crc kubenswrapper[4702]: E1125 11:00:00.164714 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerName="extract-utilities" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.164721 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerName="extract-utilities" Nov 25 11:00:00 crc kubenswrapper[4702]: E1125 11:00:00.164737 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerName="extract-content" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.164745 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerName="extract-content" Nov 25 11:00:00 crc kubenswrapper[4702]: E1125 11:00:00.164755 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerName="extract-content" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.164762 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerName="extract-content" Nov 25 11:00:00 crc kubenswrapper[4702]: 
I1125 11:00:00.164926 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="35019574-e335-4c69-9a66-1b3dbbdffccd" containerName="registry-server" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.164946 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="e95f40d5-8529-4b6c-bd45-acbcc16b62ec" containerName="registry-server" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.166342 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.169188 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.169382 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.176788 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx"] Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.265088 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rs4nh\" (UniqueName: \"kubernetes.io/projected/4553acd4-99d7-413b-b0db-14f39fe1abd7-kube-api-access-rs4nh\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.265373 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4553acd4-99d7-413b-b0db-14f39fe1abd7-config-volume\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.265644 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4553acd4-99d7-413b-b0db-14f39fe1abd7-secret-volume\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.367104 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4553acd4-99d7-413b-b0db-14f39fe1abd7-config-volume\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.367195 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4553acd4-99d7-413b-b0db-14f39fe1abd7-secret-volume\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.367235 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rs4nh\" (UniqueName: 
\"kubernetes.io/projected/4553acd4-99d7-413b-b0db-14f39fe1abd7-kube-api-access-rs4nh\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.368173 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4553acd4-99d7-413b-b0db-14f39fe1abd7-config-volume\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.373349 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4553acd4-99d7-413b-b0db-14f39fe1abd7-secret-volume\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.389149 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rs4nh\" (UniqueName: \"kubernetes.io/projected/4553acd4-99d7-413b-b0db-14f39fe1abd7-kube-api-access-rs4nh\") pod \"collect-profiles-29401140-gfxgx\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.494424 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:00 crc kubenswrapper[4702]: I1125 11:00:00.903032 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx"] Nov 25 11:00:01 crc kubenswrapper[4702]: I1125 11:00:01.793293 4702 generic.go:334] "Generic (PLEG): container finished" podID="4553acd4-99d7-413b-b0db-14f39fe1abd7" containerID="68918fe61eacdef03e036e3bbb5e23b1cb600d11daebb01caa7a756d14b64e18" exitCode=0 Nov 25 11:00:01 crc kubenswrapper[4702]: I1125 11:00:01.793582 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" event={"ID":"4553acd4-99d7-413b-b0db-14f39fe1abd7","Type":"ContainerDied","Data":"68918fe61eacdef03e036e3bbb5e23b1cb600d11daebb01caa7a756d14b64e18"} Nov 25 11:00:01 crc kubenswrapper[4702]: I1125 11:00:01.793624 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" event={"ID":"4553acd4-99d7-413b-b0db-14f39fe1abd7","Type":"ContainerStarted","Data":"483707b5106fa0cce4806090daef7334263c64e3c75dae935e1351df260df230"} Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.035280 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.106753 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4553acd4-99d7-413b-b0db-14f39fe1abd7-secret-volume\") pod \"4553acd4-99d7-413b-b0db-14f39fe1abd7\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.106827 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rs4nh\" (UniqueName: \"kubernetes.io/projected/4553acd4-99d7-413b-b0db-14f39fe1abd7-kube-api-access-rs4nh\") pod \"4553acd4-99d7-413b-b0db-14f39fe1abd7\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.106861 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4553acd4-99d7-413b-b0db-14f39fe1abd7-config-volume\") pod \"4553acd4-99d7-413b-b0db-14f39fe1abd7\" (UID: \"4553acd4-99d7-413b-b0db-14f39fe1abd7\") " Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.107666 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4553acd4-99d7-413b-b0db-14f39fe1abd7-config-volume" (OuterVolumeSpecName: "config-volume") pod "4553acd4-99d7-413b-b0db-14f39fe1abd7" (UID: "4553acd4-99d7-413b-b0db-14f39fe1abd7"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.113013 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4553acd4-99d7-413b-b0db-14f39fe1abd7-kube-api-access-rs4nh" (OuterVolumeSpecName: "kube-api-access-rs4nh") pod "4553acd4-99d7-413b-b0db-14f39fe1abd7" (UID: "4553acd4-99d7-413b-b0db-14f39fe1abd7"). InnerVolumeSpecName "kube-api-access-rs4nh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.113274 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4553acd4-99d7-413b-b0db-14f39fe1abd7-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "4553acd4-99d7-413b-b0db-14f39fe1abd7" (UID: "4553acd4-99d7-413b-b0db-14f39fe1abd7"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.208179 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rs4nh\" (UniqueName: \"kubernetes.io/projected/4553acd4-99d7-413b-b0db-14f39fe1abd7-kube-api-access-rs4nh\") on node \"crc\" DevicePath \"\"" Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.208239 4702 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/4553acd4-99d7-413b-b0db-14f39fe1abd7-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.208252 4702 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/4553acd4-99d7-413b-b0db-14f39fe1abd7-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.812420 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" event={"ID":"4553acd4-99d7-413b-b0db-14f39fe1abd7","Type":"ContainerDied","Data":"483707b5106fa0cce4806090daef7334263c64e3c75dae935e1351df260df230"} Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.812456 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="483707b5106fa0cce4806090daef7334263c64e3c75dae935e1351df260df230" Nov 25 11:00:03 crc kubenswrapper[4702]: I1125 11:00:03.812463 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401140-gfxgx" Nov 25 11:00:14 crc kubenswrapper[4702]: I1125 11:00:14.402401 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:00:14 crc kubenswrapper[4702]: E1125 11:00:14.402927 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:00:25 crc kubenswrapper[4702]: I1125 11:00:25.402371 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:00:25 crc kubenswrapper[4702]: E1125 11:00:25.403584 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:00:40 crc kubenswrapper[4702]: I1125 11:00:40.402414 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:00:40 crc kubenswrapper[4702]: E1125 11:00:40.403632 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:00:48 crc kubenswrapper[4702]: I1125 11:00:48.447099 4702 scope.go:117] "RemoveContainer" containerID="bd0dca3dea27484db4e8258da18778e24d1580208fcb39d5bee2c54067187158" Nov 25 11:00:48 crc kubenswrapper[4702]: I1125 11:00:48.483012 4702 scope.go:117] "RemoveContainer" containerID="a220a5e2311e81b030c74226a63a7ea4718764b1076c55f00ef41a425ac43e34" Nov 25 11:00:48 crc kubenswrapper[4702]: I1125 11:00:48.512331 4702 scope.go:117] "RemoveContainer" containerID="c2ff9776f4536b8d26994d2aef594f34dc183e1e01b29f8ada227b0bbc3684ab" Nov 25 11:00:48 crc kubenswrapper[4702]: I1125 11:00:48.529669 4702 scope.go:117] "RemoveContainer" containerID="981b1fcfa0dbe2677cbb696df2870e06c2a008295f64b21996113f85b389d3db" Nov 25 11:00:48 crc kubenswrapper[4702]: I1125 11:00:48.556757 4702 scope.go:117] "RemoveContainer" containerID="91647b2ee01839662d1ba608c4d54aeda9e941c35df0e7dd8a14a1ee6e76292d" Nov 25 11:00:55 crc kubenswrapper[4702]: I1125 11:00:55.402593 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:00:55 crc kubenswrapper[4702]: E1125 11:00:55.403752 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.138138 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone-cron-29401141-jnmk5"] Nov 25 11:01:00 crc kubenswrapper[4702]: E1125 11:01:00.139048 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4553acd4-99d7-413b-b0db-14f39fe1abd7" containerName="collect-profiles" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.139061 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="4553acd4-99d7-413b-b0db-14f39fe1abd7" containerName="collect-profiles" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.139238 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="4553acd4-99d7-413b-b0db-14f39fe1abd7" containerName="collect-profiles" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.139666 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.154494 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-cron-29401141-jnmk5"] Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.186670 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ff8hg\" (UniqueName: \"kubernetes.io/projected/30273a0e-e547-46ce-b261-22d804292837-kube-api-access-ff8hg\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.186842 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-fernet-keys\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.186879 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-config-data\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.288515 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ff8hg\" (UniqueName: \"kubernetes.io/projected/30273a0e-e547-46ce-b261-22d804292837-kube-api-access-ff8hg\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.288640 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-fernet-keys\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.288673 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-config-data\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.296931 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-config-data\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.297005 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-fernet-keys\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.306127 4702 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ff8hg\" (UniqueName: \"kubernetes.io/projected/30273a0e-e547-46ce-b261-22d804292837-kube-api-access-ff8hg\") pod \"keystone-cron-29401141-jnmk5\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.463107 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:00 crc kubenswrapper[4702]: I1125 11:01:00.720352 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone-cron-29401141-jnmk5"] Nov 25 11:01:01 crc kubenswrapper[4702]: I1125 11:01:01.220204 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" event={"ID":"30273a0e-e547-46ce-b261-22d804292837","Type":"ContainerStarted","Data":"1234e7607711fa560e795101d30d28592177f15de2f41adefec2d09afce863a3"} Nov 25 11:01:01 crc kubenswrapper[4702]: I1125 11:01:01.220515 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" event={"ID":"30273a0e-e547-46ce-b261-22d804292837","Type":"ContainerStarted","Data":"aaba255a9ec8dbffd0668541f1bda7bd32a295023d0786dc73dcb3b8ebdd7362"} Nov 25 11:01:01 crc kubenswrapper[4702]: I1125 11:01:01.236397 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" podStartSLOduration=1.236378148 podStartE2EDuration="1.236378148s" podCreationTimestamp="2025-11-25 11:01:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 11:01:01.233657729 +0000 UTC m=+1758.600253438" watchObservedRunningTime="2025-11-25 11:01:01.236378148 +0000 UTC m=+1758.602973827" Nov 25 11:01:03 crc kubenswrapper[4702]: I1125 11:01:03.232591 4702 generic.go:334] "Generic (PLEG): container finished" podID="30273a0e-e547-46ce-b261-22d804292837" containerID="1234e7607711fa560e795101d30d28592177f15de2f41adefec2d09afce863a3" exitCode=0 Nov 25 11:01:03 crc kubenswrapper[4702]: I1125 11:01:03.232644 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" event={"ID":"30273a0e-e547-46ce-b261-22d804292837","Type":"ContainerDied","Data":"1234e7607711fa560e795101d30d28592177f15de2f41adefec2d09afce863a3"} Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.475786 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.650802 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ff8hg\" (UniqueName: \"kubernetes.io/projected/30273a0e-e547-46ce-b261-22d804292837-kube-api-access-ff8hg\") pod \"30273a0e-e547-46ce-b261-22d804292837\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.650888 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-fernet-keys\") pod \"30273a0e-e547-46ce-b261-22d804292837\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.650959 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-config-data\") pod \"30273a0e-e547-46ce-b261-22d804292837\" (UID: \"30273a0e-e547-46ce-b261-22d804292837\") " Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.655773 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "30273a0e-e547-46ce-b261-22d804292837" (UID: "30273a0e-e547-46ce-b261-22d804292837"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.655974 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30273a0e-e547-46ce-b261-22d804292837-kube-api-access-ff8hg" (OuterVolumeSpecName: "kube-api-access-ff8hg") pod "30273a0e-e547-46ce-b261-22d804292837" (UID: "30273a0e-e547-46ce-b261-22d804292837"). InnerVolumeSpecName "kube-api-access-ff8hg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.682480 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-config-data" (OuterVolumeSpecName: "config-data") pod "30273a0e-e547-46ce-b261-22d804292837" (UID: "30273a0e-e547-46ce-b261-22d804292837"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.752562 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ff8hg\" (UniqueName: \"kubernetes.io/projected/30273a0e-e547-46ce-b261-22d804292837-kube-api-access-ff8hg\") on node \"crc\" DevicePath \"\"" Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.752597 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-fernet-keys\") on node \"crc\" DevicePath \"\"" Nov 25 11:01:04 crc kubenswrapper[4702]: I1125 11:01:04.752610 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/30273a0e-e547-46ce-b261-22d804292837-config-data\") on node \"crc\" DevicePath \"\"" Nov 25 11:01:05 crc kubenswrapper[4702]: I1125 11:01:05.248183 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" event={"ID":"30273a0e-e547-46ce-b261-22d804292837","Type":"ContainerDied","Data":"aaba255a9ec8dbffd0668541f1bda7bd32a295023d0786dc73dcb3b8ebdd7362"} Nov 25 11:01:05 crc kubenswrapper[4702]: I1125 11:01:05.248235 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="aaba255a9ec8dbffd0668541f1bda7bd32a295023d0786dc73dcb3b8ebdd7362" Nov 25 11:01:05 crc kubenswrapper[4702]: I1125 11:01:05.248236 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-cron-29401141-jnmk5" Nov 25 11:01:09 crc kubenswrapper[4702]: I1125 11:01:09.402701 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:01:09 crc kubenswrapper[4702]: E1125 11:01:09.403508 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:01:24 crc kubenswrapper[4702]: I1125 11:01:24.402444 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:01:24 crc kubenswrapper[4702]: E1125 11:01:24.403285 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:01:37 crc kubenswrapper[4702]: I1125 11:01:37.401933 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:01:37 crc kubenswrapper[4702]: E1125 11:01:37.402813 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:01:48 crc kubenswrapper[4702]: I1125 11:01:48.645525 4702 scope.go:117] "RemoveContainer" containerID="836570181d3b203c1e9bdf23df44eceffcd9409da4a20ca978737d29dc351283" Nov 25 11:01:48 crc kubenswrapper[4702]: I1125 11:01:48.677271 4702 scope.go:117] "RemoveContainer" containerID="d7256210ff50e62235f6d651064e6e58c732176074fd0f20feaa2293fdef71d7" Nov 25 11:01:48 crc kubenswrapper[4702]: I1125 11:01:48.699403 4702 scope.go:117] "RemoveContainer" containerID="4e875c298086ffb9ebb3f07e90b9e6dc438dee12acdb58f49c555d9a813972d4" Nov 25 11:01:48 crc kubenswrapper[4702]: I1125 11:01:48.771459 4702 scope.go:117] "RemoveContainer" containerID="c339a74f260121e898cf28adac5e3e0d83dfd70a903e82c1cf651cd574e6e7d0" Nov 25 11:01:48 crc kubenswrapper[4702]: I1125 11:01:48.817943 4702 scope.go:117] "RemoveContainer" containerID="3ad22f60c132479ae514676fafd59a0fd968eab76e66cd462427c70426704f8c" Nov 25 11:01:49 crc kubenswrapper[4702]: I1125 11:01:49.401885 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:01:49 crc kubenswrapper[4702]: E1125 11:01:49.402154 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:02:00 crc kubenswrapper[4702]: I1125 11:02:00.402656 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:02:00 crc kubenswrapper[4702]: E1125 11:02:00.403324 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:02:12 crc kubenswrapper[4702]: I1125 11:02:12.402197 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:02:12 crc kubenswrapper[4702]: E1125 11:02:12.402746 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:02:24 crc kubenswrapper[4702]: I1125 11:02:24.402349 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:02:24 crc kubenswrapper[4702]: E1125 11:02:24.403144 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:02:36 crc kubenswrapper[4702]: I1125 11:02:36.402984 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:02:36 crc kubenswrapper[4702]: E1125 11:02:36.403860 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:02:47 crc kubenswrapper[4702]: I1125 11:02:47.402667 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:02:47 crc kubenswrapper[4702]: E1125 11:02:47.403389 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:02:48 crc kubenswrapper[4702]: I1125 11:02:48.904372 4702 scope.go:117] "RemoveContainer" containerID="4e138d7ee57296d1aaf2529a2e690781275dcdcf3d3b187f22ee5617e4023e81" Nov 25 11:02:59 crc kubenswrapper[4702]: I1125 11:02:59.403047 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:02:59 crc kubenswrapper[4702]: E1125 11:02:59.403721 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:03:11 crc kubenswrapper[4702]: I1125 11:03:11.402806 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:03:11 crc kubenswrapper[4702]: E1125 11:03:11.403667 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:03:26 crc kubenswrapper[4702]: I1125 11:03:26.401875 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:03:26 crc kubenswrapper[4702]: E1125 11:03:26.402615 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:03:40 crc kubenswrapper[4702]: I1125 11:03:40.402536 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:03:40 crc kubenswrapper[4702]: E1125 11:03:40.403519 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:03:52 crc kubenswrapper[4702]: I1125 11:03:52.401996 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:03:53 crc kubenswrapper[4702]: I1125 11:03:53.496495 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"2afa2cc519d18597e6cb179d37ccff1596b73bd4c5e742111c8ccb1dfcf95693"} Nov 25 11:06:13 crc kubenswrapper[4702]: I1125 11:06:13.591099 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 11:06:13 crc kubenswrapper[4702]: I1125 11:06:13.591711 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 11:06:43 crc kubenswrapper[4702]: I1125 11:06:43.590979 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 11:06:43 crc kubenswrapper[4702]: I1125 11:06:43.591633 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 11:06:45 crc kubenswrapper[4702]: I1125 11:06:45.047307 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-49kqw"] Nov 25 11:06:45 crc kubenswrapper[4702]: I1125 11:06:45.052967 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-create-49kqw"] Nov 25 11:06:45 crc kubenswrapper[4702]: I1125 11:06:45.411584 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a18904e0-0c8c-4dc4-b018-6edaed969779" path="/var/lib/kubelet/pods/a18904e0-0c8c-4dc4-b018-6edaed969779/volumes" Nov 25 11:06:46 crc kubenswrapper[4702]: I1125 
11:06:46.018262 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"] Nov 25 11:06:46 crc kubenswrapper[4702]: I1125 11:06:46.022381 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-2de6-account-create-update-hxmqj"] Nov 25 11:06:47 crc kubenswrapper[4702]: I1125 11:06:47.410926 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="75008377-9dc3-4121-8794-e51ad732a7ca" path="/var/lib/kubelet/pods/75008377-9dc3-4121-8794-e51ad732a7ca/volumes" Nov 25 11:06:48 crc kubenswrapper[4702]: I1125 11:06:48.986355 4702 scope.go:117] "RemoveContainer" containerID="99c2bf1f80796866704cf7a2c6ec2eddee465827c5b4b17e1ca59501791a4b11" Nov 25 11:06:49 crc kubenswrapper[4702]: I1125 11:06:49.007773 4702 scope.go:117] "RemoveContainer" containerID="4562ccc282fea249e6526624cbde8b21303cfe47a746b8fd52535047200d259e" Nov 25 11:06:52 crc kubenswrapper[4702]: I1125 11:06:52.029210 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-lwm4x"] Nov 25 11:06:52 crc kubenswrapper[4702]: I1125 11:06:52.036471 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-db-sync-lwm4x"] Nov 25 11:06:53 crc kubenswrapper[4702]: I1125 11:06:53.416546 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ae07277-35e9-408c-ad38-080789945cc7" path="/var/lib/kubelet/pods/8ae07277-35e9-408c-ad38-080789945cc7/volumes" Nov 25 11:06:59 crc kubenswrapper[4702]: I1125 11:06:59.025189 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-56m29"] Nov 25 11:06:59 crc kubenswrapper[4702]: I1125 11:06:59.031675 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-bootstrap-56m29"] Nov 25 11:06:59 crc kubenswrapper[4702]: I1125 11:06:59.410752 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ba8bde50-661d-47cd-baf0-d1f137fe82ba" path="/var/lib/kubelet/pods/ba8bde50-661d-47cd-baf0-d1f137fe82ba/volumes" Nov 25 11:07:13 crc kubenswrapper[4702]: I1125 11:07:13.591291 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 11:07:13 crc kubenswrapper[4702]: I1125 11:07:13.591884 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 11:07:13 crc kubenswrapper[4702]: I1125 11:07:13.591937 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 11:07:13 crc kubenswrapper[4702]: I1125 11:07:13.592576 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2afa2cc519d18597e6cb179d37ccff1596b73bd4c5e742111c8ccb1dfcf95693"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 11:07:13 crc kubenswrapper[4702]: I1125 
11:07:13.592643 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://2afa2cc519d18597e6cb179d37ccff1596b73bd4c5e742111c8ccb1dfcf95693" gracePeriod=600 Nov 25 11:07:13 crc kubenswrapper[4702]: I1125 11:07:13.808714 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="2afa2cc519d18597e6cb179d37ccff1596b73bd4c5e742111c8ccb1dfcf95693" exitCode=0 Nov 25 11:07:13 crc kubenswrapper[4702]: I1125 11:07:13.808752 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"2afa2cc519d18597e6cb179d37ccff1596b73bd4c5e742111c8ccb1dfcf95693"} Nov 25 11:07:13 crc kubenswrapper[4702]: I1125 11:07:13.808959 4702 scope.go:117] "RemoveContainer" containerID="84dc0d8a729699374d3c353da9b66c3b1e4f1b4637a8a6dffc66cb171354c659" Nov 25 11:07:14 crc kubenswrapper[4702]: I1125 11:07:14.817363 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0"} Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.302198 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstackclient"] Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.302942 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/openstackclient" podUID="3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" containerName="openstackclient" containerID="cri-o://5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d" gracePeriod=30 Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.690243 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstackclient" Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.825189 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config-secret\") pod \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.825260 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wfkj9\" (UniqueName: \"kubernetes.io/projected/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-kube-api-access-wfkj9\") pod \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.825279 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config\") pod \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\" (UID: \"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260\") " Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.831204 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-kube-api-access-wfkj9" (OuterVolumeSpecName: "kube-api-access-wfkj9") pod "3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" (UID: "3b922fb6-9c1a-41a2-8e6f-15c6b93b3260"). InnerVolumeSpecName "kube-api-access-wfkj9". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.844512 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config" (OuterVolumeSpecName: "openstack-config") pod "3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" (UID: "3b922fb6-9c1a-41a2-8e6f-15c6b93b3260"). InnerVolumeSpecName "openstack-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.846895 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config-secret" (OuterVolumeSpecName: "openstack-config-secret") pod "3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" (UID: "3b922fb6-9c1a-41a2-8e6f-15c6b93b3260"). InnerVolumeSpecName "openstack-config-secret". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.926767 4702 reconciler_common.go:293] "Volume detached for volume \"openstack-config-secret\" (UniqueName: \"kubernetes.io/secret/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config-secret\") on node \"crc\" DevicePath \"\"" Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.926810 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wfkj9\" (UniqueName: \"kubernetes.io/projected/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-kube-api-access-wfkj9\") on node \"crc\" DevicePath \"\"" Nov 25 11:07:48 crc kubenswrapper[4702]: I1125 11:07:48.926824 4702 reconciler_common.go:293] "Volume detached for volume \"openstack-config\" (UniqueName: \"kubernetes.io/configmap/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260-openstack-config\") on node \"crc\" DevicePath \"\"" Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.043899 4702 generic.go:334] "Generic (PLEG): container finished" podID="3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" containerID="5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d" exitCode=143 Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.043989 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstackclient" event={"ID":"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260","Type":"ContainerDied","Data":"5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d"} Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.044031 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstackclient" event={"ID":"3b922fb6-9c1a-41a2-8e6f-15c6b93b3260","Type":"ContainerDied","Data":"f88028eb8de27d78ead98a9cd6a62919bad00a882f3e7c9aa1370d2a702df591"} Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.044047 4702 scope.go:117] "RemoveContainer" containerID="5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d" Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.043972 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="keystone-kuttl-tests/openstackclient"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.064086 4702 scope.go:117] "RemoveContainer" containerID="5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d"
Nov 25 11:07:49 crc kubenswrapper[4702]: E1125 11:07:49.064451 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d\": container with ID starting with 5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d not found: ID does not exist" containerID="5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.064482 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d"} err="failed to get container status \"5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d\": rpc error: code = NotFound desc = could not find container \"5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d\": container with ID starting with 5a7b36632526d2d65e6acc7d0887e55bdde9497c31c3d4dd22642ece8e39770d not found: ID does not exist"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.068446 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstackclient"]
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.073260 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/openstackclient"]
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.107332 4702 scope.go:117] "RemoveContainer" containerID="53741271cef622075ecd89f39df860a0400c9bb6d59b6ab51e0a5b65ef07a1b4"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.150206 4702 scope.go:117] "RemoveContainer" containerID="1ab8ef1b3e8a80345048473e2acb198396d893efb47cbd6886b76860ca776277"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.207602 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-846fc69944-scqjl"]
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.210856 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" podUID="7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" containerName="keystone-api" containerID="cri-o://b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd" gracePeriod=30
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.215746 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-cron-29401141-jnmk5"]
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.225945 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-cron-29401141-jnmk5"]
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.263506 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"]
Nov 25 11:07:49 crc kubenswrapper[4702]: E1125 11:07:49.263895 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30273a0e-e547-46ce-b261-22d804292837" containerName="keystone-cron"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.263934 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="30273a0e-e547-46ce-b261-22d804292837" containerName="keystone-cron"
Nov 25 11:07:49 crc kubenswrapper[4702]: E1125 11:07:49.263953 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" containerName="openstackclient"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.263961 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" containerName="openstackclient"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.264111 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="30273a0e-e547-46ce-b261-22d804292837" containerName="keystone-cron"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.264127 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" containerName="openstackclient"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.264670 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.270083 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"]
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.334064 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-operator-scripts\") pod \"keystone2de6-account-delete-m5nw7\" (UID: \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\") " pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.334311 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ccj2m\" (UniqueName: \"kubernetes.io/projected/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-kube-api-access-ccj2m\") pod \"keystone2de6-account-delete-m5nw7\" (UID: \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\") " pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.414451 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30273a0e-e547-46ce-b261-22d804292837" path="/var/lib/kubelet/pods/30273a0e-e547-46ce-b261-22d804292837/volumes"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.415026 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b922fb6-9c1a-41a2-8e6f-15c6b93b3260" path="/var/lib/kubelet/pods/3b922fb6-9c1a-41a2-8e6f-15c6b93b3260/volumes"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.436527 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ccj2m\" (UniqueName: \"kubernetes.io/projected/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-kube-api-access-ccj2m\") pod \"keystone2de6-account-delete-m5nw7\" (UID: \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\") " pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.437079 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-operator-scripts\") pod \"keystone2de6-account-delete-m5nw7\" (UID: \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\") " pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.437793 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-operator-scripts\") pod \"keystone2de6-account-delete-m5nw7\" (UID: \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\") " pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.457432 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ccj2m\" (UniqueName: \"kubernetes.io/projected/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-kube-api-access-ccj2m\") pod \"keystone2de6-account-delete-m5nw7\" (UID: \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\") " pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.584553 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:49 crc kubenswrapper[4702]: I1125 11:07:49.985782 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"]
Nov 25 11:07:50 crc kubenswrapper[4702]: I1125 11:07:50.052160 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7" event={"ID":"ef9ace14-2e75-4123-aef0-fc3fa0edd31e","Type":"ContainerStarted","Data":"0a4ff914c8b02e10abc27e332a659c585950ec64dcf1448e6c9a86cd49f0ae6c"}
Nov 25 11:07:51 crc kubenswrapper[4702]: I1125 11:07:51.060858 4702 generic.go:334] "Generic (PLEG): container finished" podID="ef9ace14-2e75-4123-aef0-fc3fa0edd31e" containerID="a0bfd12615832aa33c4776ec3bf5d837c15a030ff59cee20529568ed10f03617" exitCode=0
Nov 25 11:07:51 crc kubenswrapper[4702]: I1125 11:07:51.060946 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7" event={"ID":"ef9ace14-2e75-4123-aef0-fc3fa0edd31e","Type":"ContainerDied","Data":"a0bfd12615832aa33c4776ec3bf5d837c15a030ff59cee20529568ed10f03617"}
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.419797 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.497379 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ccj2m\" (UniqueName: \"kubernetes.io/projected/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-kube-api-access-ccj2m\") pod \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\" (UID: \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\") "
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.497473 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-operator-scripts\") pod \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\" (UID: \"ef9ace14-2e75-4123-aef0-fc3fa0edd31e\") "
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.500040 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ef9ace14-2e75-4123-aef0-fc3fa0edd31e" (UID: "ef9ace14-2e75-4123-aef0-fc3fa0edd31e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.516296 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-kube-api-access-ccj2m" (OuterVolumeSpecName: "kube-api-access-ccj2m") pod "ef9ace14-2e75-4123-aef0-fc3fa0edd31e" (UID: "ef9ace14-2e75-4123-aef0-fc3fa0edd31e"). InnerVolumeSpecName "kube-api-access-ccj2m". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.600636 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ccj2m\" (UniqueName: \"kubernetes.io/projected/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-kube-api-access-ccj2m\") on node \"crc\" DevicePath \"\""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.600688 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ef9ace14-2e75-4123-aef0-fc3fa0edd31e-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.605741 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl"
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.701247 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-config-data\") pod \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") "
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.701283 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-scripts\") pod \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") "
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.701363 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9mrb5\" (UniqueName: \"kubernetes.io/projected/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-kube-api-access-9mrb5\") pod \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") "
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.701383 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-fernet-keys\") pod \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") "
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.701426 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-credential-keys\") pod \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\" (UID: \"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5\") "
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.704440 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" (UID: "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.704794 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" (UID: "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.705205 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-scripts" (OuterVolumeSpecName: "scripts") pod "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" (UID: "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.706021 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-kube-api-access-9mrb5" (OuterVolumeSpecName: "kube-api-access-9mrb5") pod "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" (UID: "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5"). InnerVolumeSpecName "kube-api-access-9mrb5". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.718790 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-config-data" (OuterVolumeSpecName: "config-data") pod "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" (UID: "7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.803208 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-config-data\") on node \"crc\" DevicePath \"\""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.803259 4702 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.803273 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9mrb5\" (UniqueName: \"kubernetes.io/projected/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-kube-api-access-9mrb5\") on node \"crc\" DevicePath \"\""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.803285 4702 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-fernet-keys\") on node \"crc\" DevicePath \"\""
Nov 25 11:07:52 crc kubenswrapper[4702]: I1125 11:07:52.803301 4702 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5-credential-keys\") on node \"crc\" DevicePath \"\""
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.079134 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7" event={"ID":"ef9ace14-2e75-4123-aef0-fc3fa0edd31e","Type":"ContainerDied","Data":"0a4ff914c8b02e10abc27e332a659c585950ec64dcf1448e6c9a86cd49f0ae6c"}
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.079179 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0a4ff914c8b02e10abc27e332a659c585950ec64dcf1448e6c9a86cd49f0ae6c"
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.079240 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.123750 4702 generic.go:334] "Generic (PLEG): container finished" podID="7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" containerID="b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd" exitCode=0
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.123799 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" event={"ID":"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5","Type":"ContainerDied","Data":"b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd"}
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.123831 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl" event={"ID":"7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5","Type":"ContainerDied","Data":"a210820c8717da1244b23e315f74ef4e060969e80b899c2f8ed5c6c864735ff3"}
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.123854 4702 scope.go:117] "RemoveContainer" containerID="b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd"
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.123864 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/keystone-846fc69944-scqjl"
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.162051 4702 scope.go:117] "RemoveContainer" containerID="b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd"
Nov 25 11:07:53 crc kubenswrapper[4702]: E1125 11:07:53.162707 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd\": container with ID starting with b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd not found: ID does not exist" containerID="b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd"
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.162761 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd"} err="failed to get container status \"b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd\": rpc error: code = NotFound desc = could not find container \"b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd\": container with ID starting with b3a8dfb5759f55e14cdc2ec20eb668e4db802e576c64bb836f5d6f4acc280efd not found: ID does not exist"
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.166955 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone-846fc69944-scqjl"]
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.173042 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone-846fc69944-scqjl"]
Nov 25 11:07:53 crc kubenswrapper[4702]: I1125 11:07:53.412090 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" path="/var/lib/kubelet/pods/7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5/volumes"
Nov 25 11:07:54 crc kubenswrapper[4702]: I1125 11:07:54.302313 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"]
Nov 25 11:07:54 crc kubenswrapper[4702]: I1125 11:07:54.308787 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/keystone2de6-account-delete-m5nw7"]
Nov 25 11:07:55 crc kubenswrapper[4702]: I1125 11:07:55.410731 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ef9ace14-2e75-4123-aef0-fc3fa0edd31e" path="/var/lib/kubelet/pods/ef9ace14-2e75-4123-aef0-fc3fa0edd31e/volumes"
Nov 25 11:08:03 crc kubenswrapper[4702]: I1125 11:08:03.168529 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"]
Nov 25 11:08:03 crc kubenswrapper[4702]: I1125 11:08:03.182034 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"]
Nov 25 11:08:03 crc kubenswrapper[4702]: I1125 11:08:03.186959 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"]
Nov 25 11:08:03 crc kubenswrapper[4702]: I1125 11:08:03.309855 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/openstack-galera-2" podUID="a3a68041-1390-4922-81b3-ca65322db681" containerName="galera" containerID="cri-o://e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635" gracePeriod=30
Nov 25 11:08:03 crc kubenswrapper[4702]: I1125 11:08:03.859362 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/memcached-0"]
Nov 25 11:08:03 crc kubenswrapper[4702]: I1125 11:08:03.859576 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/memcached-0" podUID="438d54fb-aec3-476a-ae67-1d906854d271" containerName="memcached" containerID="cri-o://94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b" gracePeriod=30
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.128643 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-2"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.205734 4702 generic.go:334] "Generic (PLEG): container finished" podID="a3a68041-1390-4922-81b3-ca65322db681" containerID="e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635" exitCode=0
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.205782 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"a3a68041-1390-4922-81b3-ca65322db681","Type":"ContainerDied","Data":"e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635"}
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.205809 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-2"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.205825 4702 scope.go:117] "RemoveContainer" containerID="e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.205813 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-2" event={"ID":"a3a68041-1390-4922-81b3-ca65322db681","Type":"ContainerDied","Data":"30ff0201de893c87705f437d0bbc5fe37c402d7d7682e986d2215dac39c35787"}
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.224730 4702 scope.go:117] "RemoveContainer" containerID="557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.246500 4702 scope.go:117] "RemoveContainer" containerID="e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635"
Nov 25 11:08:04 crc kubenswrapper[4702]: E1125 11:08:04.249328 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635\": container with ID starting with e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635 not found: ID does not exist" containerID="e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.249359 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635"} err="failed to get container status \"e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635\": rpc error: code = NotFound desc = could not find container \"e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635\": container with ID starting with e1c79829575377d0b08d9a55018783f18f3a7f8536e84e55e3b69de266623635 not found: ID does not exist"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.249378 4702 scope.go:117] "RemoveContainer" containerID="557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696"
Nov 25 11:08:04 crc kubenswrapper[4702]: E1125 11:08:04.250038 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696\": container with ID starting with 557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696 not found: ID does not exist" containerID="557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.250112 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696"} err="failed to get container status \"557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696\": rpc error: code = NotFound desc = could not find container \"557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696\": container with ID starting with 557485d5cc950a28a1859e2e4ca2648675604c01de2d0755188e2a5c18c12696 not found: ID does not exist"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.281551 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a3a68041-1390-4922-81b3-ca65322db681-config-data-generated\") pod \"a3a68041-1390-4922-81b3-ca65322db681\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.281619 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-config-data-default\") pod \"a3a68041-1390-4922-81b3-ca65322db681\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.281672 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-kolla-config\") pod \"a3a68041-1390-4922-81b3-ca65322db681\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.281764 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-operator-scripts\") pod \"a3a68041-1390-4922-81b3-ca65322db681\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.281825 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"a3a68041-1390-4922-81b3-ca65322db681\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.282135 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fh45j\" (UniqueName: \"kubernetes.io/projected/a3a68041-1390-4922-81b3-ca65322db681-kube-api-access-fh45j\") pod \"a3a68041-1390-4922-81b3-ca65322db681\" (UID: \"a3a68041-1390-4922-81b3-ca65322db681\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.283330 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3a68041-1390-4922-81b3-ca65322db681-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "a3a68041-1390-4922-81b3-ca65322db681" (UID: "a3a68041-1390-4922-81b3-ca65322db681"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.283674 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "a3a68041-1390-4922-81b3-ca65322db681" (UID: "a3a68041-1390-4922-81b3-ca65322db681"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.283804 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "a3a68041-1390-4922-81b3-ca65322db681" (UID: "a3a68041-1390-4922-81b3-ca65322db681"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.284231 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "a3a68041-1390-4922-81b3-ca65322db681" (UID: "a3a68041-1390-4922-81b3-ca65322db681"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.285691 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"]
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.288589 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3a68041-1390-4922-81b3-ca65322db681-kube-api-access-fh45j" (OuterVolumeSpecName: "kube-api-access-fh45j") pod "a3a68041-1390-4922-81b3-ca65322db681" (UID: "a3a68041-1390-4922-81b3-ca65322db681"). InnerVolumeSpecName "kube-api-access-fh45j". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.295004 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "a3a68041-1390-4922-81b3-ca65322db681" (UID: "a3a68041-1390-4922-81b3-ca65322db681"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.383734 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-operator-scripts\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.383778 4702 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.383791 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fh45j\" (UniqueName: \"kubernetes.io/projected/a3a68041-1390-4922-81b3-ca65322db681-kube-api-access-fh45j\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.383805 4702 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/a3a68041-1390-4922-81b3-ca65322db681-config-data-generated\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.383823 4702 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-config-data-default\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.383833 4702 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/a3a68041-1390-4922-81b3-ca65322db681-kolla-config\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.395103 4702 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.485373 4702 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.547694 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"]
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.554572 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/openstack-galera-2"]
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.619089 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/memcached-0"
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.666297 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"]
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.687144 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nbjt4\" (UniqueName: \"kubernetes.io/projected/438d54fb-aec3-476a-ae67-1d906854d271-kube-api-access-nbjt4\") pod \"438d54fb-aec3-476a-ae67-1d906854d271\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.687233 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-kolla-config\") pod \"438d54fb-aec3-476a-ae67-1d906854d271\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.687284 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-config-data\") pod \"438d54fb-aec3-476a-ae67-1d906854d271\" (UID: \"438d54fb-aec3-476a-ae67-1d906854d271\") "
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.688125 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "438d54fb-aec3-476a-ae67-1d906854d271" (UID: "438d54fb-aec3-476a-ae67-1d906854d271"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.688141 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-config-data" (OuterVolumeSpecName: "config-data") pod "438d54fb-aec3-476a-ae67-1d906854d271" (UID: "438d54fb-aec3-476a-ae67-1d906854d271"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.693419 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/438d54fb-aec3-476a-ae67-1d906854d271-kube-api-access-nbjt4" (OuterVolumeSpecName: "kube-api-access-nbjt4") pod "438d54fb-aec3-476a-ae67-1d906854d271" (UID: "438d54fb-aec3-476a-ae67-1d906854d271"). InnerVolumeSpecName "kube-api-access-nbjt4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.788622 4702 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-kolla-config\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.788666 4702 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/438d54fb-aec3-476a-ae67-1d906854d271-config-data\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:04 crc kubenswrapper[4702]: I1125 11:08:04.788679 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nbjt4\" (UniqueName: \"kubernetes.io/projected/438d54fb-aec3-476a-ae67-1d906854d271-kube-api-access-nbjt4\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.215379 4702 generic.go:334] "Generic (PLEG): container finished" podID="438d54fb-aec3-476a-ae67-1d906854d271" containerID="94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b" exitCode=0
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.215423 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/memcached-0"
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.215463 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/memcached-0" event={"ID":"438d54fb-aec3-476a-ae67-1d906854d271","Type":"ContainerDied","Data":"94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b"}
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.215504 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/memcached-0" event={"ID":"438d54fb-aec3-476a-ae67-1d906854d271","Type":"ContainerDied","Data":"47c44fc9c00baab154ffa6e2e6fc76d926f97c42c2ddab11905a827af1ea1bbd"}
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.215528 4702 scope.go:117] "RemoveContainer" containerID="94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b"
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.234930 4702 scope.go:117] "RemoveContainer" containerID="94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b"
Nov 25 11:08:05 crc kubenswrapper[4702]: E1125 11:08:05.235297 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b\": container with ID starting with 94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b not found: ID does not exist" containerID="94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b"
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.235324 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b"} err="failed to get container status \"94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b\": rpc error: code = NotFound desc = could not find container \"94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b\": container with ID starting with 94528bac149bd80075dbe5dc06dbb461e4a16655782e65872150cf5caef1539b not found: ID does not exist"
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.250317 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/memcached-0"]
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.253855 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/memcached-0"]
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.262641 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/rabbitmq-server-0" podUID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" containerName="rabbitmq" containerID="cri-o://393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8" gracePeriod=604800
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.410778 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="438d54fb-aec3-476a-ae67-1d906854d271" path="/var/lib/kubelet/pods/438d54fb-aec3-476a-ae67-1d906854d271/volumes"
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.411487 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3a68041-1390-4922-81b3-ca65322db681" path="/var/lib/kubelet/pods/a3a68041-1390-4922-81b3-ca65322db681/volumes"
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.490164 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw"]
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.490758 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" podUID="f106a32a-e625-4380-9d1c-683bbf9036bc" containerName="manager" containerID="cri-o://5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca" gracePeriod=10
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.640584 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/openstack-galera-1" podUID="f3415a28-7b55-4649-94fd-9b976e6919d7" containerName="galera" containerID="cri-o://5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970" gracePeriod=28
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.760124 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-j9plj"]
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.760351 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-j9plj" podUID="7c562a4b-ceef-41a2-aa46-a2962017eb2b" containerName="registry-server" containerID="cri-o://d962d7e00791a4f71fd52183116aa7ac77c107948516c26bfda67efb406f5f0a" gracePeriod=30
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.837124 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq"]
Nov 25 11:08:05 crc kubenswrapper[4702]: I1125 11:08:05.840605 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/7b2b1f3700c0a9d64e231ff0df4f67171334e6283b1343bb90f12024f22xjtq"]
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.137521 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw"
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.214372 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-webhook-cert\") pod \"f106a32a-e625-4380-9d1c-683bbf9036bc\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") "
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.214484 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sp9bp\" (UniqueName: \"kubernetes.io/projected/f106a32a-e625-4380-9d1c-683bbf9036bc-kube-api-access-sp9bp\") pod \"f106a32a-e625-4380-9d1c-683bbf9036bc\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") "
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.214522 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-apiservice-cert\") pod \"f106a32a-e625-4380-9d1c-683bbf9036bc\" (UID: \"f106a32a-e625-4380-9d1c-683bbf9036bc\") "
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.220667 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "f106a32a-e625-4380-9d1c-683bbf9036bc" (UID: "f106a32a-e625-4380-9d1c-683bbf9036bc"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.221288 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f106a32a-e625-4380-9d1c-683bbf9036bc-kube-api-access-sp9bp" (OuterVolumeSpecName: "kube-api-access-sp9bp") pod "f106a32a-e625-4380-9d1c-683bbf9036bc" (UID: "f106a32a-e625-4380-9d1c-683bbf9036bc"). InnerVolumeSpecName "kube-api-access-sp9bp". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.222567 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "f106a32a-e625-4380-9d1c-683bbf9036bc" (UID: "f106a32a-e625-4380-9d1c-683bbf9036bc"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.227604 4702 generic.go:334] "Generic (PLEG): container finished" podID="7c562a4b-ceef-41a2-aa46-a2962017eb2b" containerID="d962d7e00791a4f71fd52183116aa7ac77c107948516c26bfda67efb406f5f0a" exitCode=0
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.227718 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-j9plj" event={"ID":"7c562a4b-ceef-41a2-aa46-a2962017eb2b","Type":"ContainerDied","Data":"d962d7e00791a4f71fd52183116aa7ac77c107948516c26bfda67efb406f5f0a"}
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.227780 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-j9plj" event={"ID":"7c562a4b-ceef-41a2-aa46-a2962017eb2b","Type":"ContainerDied","Data":"a8a730ab7b7b52f2383c1ea37cbe6b7e3be398ae62463db4fd106871d13cca8e"}
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.227798 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="a8a730ab7b7b52f2383c1ea37cbe6b7e3be398ae62463db4fd106871d13cca8e"
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.229882 4702 generic.go:334] "Generic (PLEG): container finished" podID="f106a32a-e625-4380-9d1c-683bbf9036bc" containerID="5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca" exitCode=0
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.229924 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" event={"ID":"f106a32a-e625-4380-9d1c-683bbf9036bc","Type":"ContainerDied","Data":"5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca"}
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.229943 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw" event={"ID":"f106a32a-e625-4380-9d1c-683bbf9036bc","Type":"ContainerDied","Data":"9b2758f30ccb273b264c686972218dcbea73a3ece2da88f33317f0f509743602"}
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.229967 4702 scope.go:117] "RemoveContainer" containerID="5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca"
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.230093 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw"
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.250491 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-j9plj"
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.253920 4702 scope.go:117] "RemoveContainer" containerID="5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca"
Nov 25 11:08:06 crc kubenswrapper[4702]: E1125 11:08:06.258029 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca\": container with ID starting with 5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca not found: ID does not exist" containerID="5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca"
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.258073 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca"} err="failed to get container status \"5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca\": rpc error: code = NotFound desc = could not find container \"5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca\": container with ID starting with 5219e7a3e119da454da4c0162298b82ab7eaefb1657c2bbcdb6d7875e19dd2ca not found: ID does not exist"
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.275155 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw"]
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.284589 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-6d87d877ff-4vvkw"]
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.317719 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mqb74\" (UniqueName: \"kubernetes.io/projected/7c562a4b-ceef-41a2-aa46-a2962017eb2b-kube-api-access-mqb74\") pod \"7c562a4b-ceef-41a2-aa46-a2962017eb2b\" (UID: \"7c562a4b-ceef-41a2-aa46-a2962017eb2b\") "
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.318066 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sp9bp\" (UniqueName: \"kubernetes.io/projected/f106a32a-e625-4380-9d1c-683bbf9036bc-kube-api-access-sp9bp\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.318089 4702 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-apiservice-cert\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.318101 4702 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/f106a32a-e625-4380-9d1c-683bbf9036bc-webhook-cert\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.322318 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7c562a4b-ceef-41a2-aa46-a2962017eb2b-kube-api-access-mqb74" (OuterVolumeSpecName: "kube-api-access-mqb74") pod "7c562a4b-ceef-41a2-aa46-a2962017eb2b" (UID: "7c562a4b-ceef-41a2-aa46-a2962017eb2b"). InnerVolumeSpecName "kube-api-access-mqb74". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.419457 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mqb74\" (UniqueName: \"kubernetes.io/projected/7c562a4b-ceef-41a2-aa46-a2962017eb2b-kube-api-access-mqb74\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:06 crc kubenswrapper[4702]: I1125 11:08:06.960757 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/rabbitmq-server-0"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.028211 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955\") pod \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.028278 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-pod-info\") pod \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.028332 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-plugins\") pod \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.028386 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-plugins-conf\") pod \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.028456 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-confd\") pod \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.028487 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-erlang-cookie\") pod \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.028522 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nrqc6\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-kube-api-access-nrqc6\") pod \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.028617 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-erlang-cookie-secret\") pod \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\" (UID: \"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.029127 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" (UID: "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.029187 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" (UID: "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8"). InnerVolumeSpecName "plugins-conf". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.029578 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" (UID: "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.032463 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-pod-info" (OuterVolumeSpecName: "pod-info") pod "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" (UID: "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.039195 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-kube-api-access-nrqc6" (OuterVolumeSpecName: "kube-api-access-nrqc6") pod "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" (UID: "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8"). InnerVolumeSpecName "kube-api-access-nrqc6". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.045117 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" (UID: "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.052443 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955" (OuterVolumeSpecName: "persistence") pod "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" (UID: "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8"). InnerVolumeSpecName "pvc-06ad8973-3126-4d03-bd80-02618c78d955". PluginName "kubernetes.io/csi", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.104382 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" (UID: "d83d39a8-b9f9-4526-ba09-9aa3848fe7b8"). InnerVolumeSpecName "rabbitmq-confd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.130117 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nrqc6\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-kube-api-access-nrqc6\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.130155 4702 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-erlang-cookie-secret\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.130191 4702 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-06ad8973-3126-4d03-bd80-02618c78d955\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955\") on node \"crc\" "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.130205 4702 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-pod-info\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.130215 4702 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-plugins\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.130225 4702 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-plugins-conf\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.130234 4702 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-confd\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.130245 4702 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.143102 4702 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice...
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.143253 4702 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-06ad8973-3126-4d03-bd80-02618c78d955" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955") on node "crc"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.231556 4702 reconciler_common.go:293] "Volume detached for volume \"pvc-06ad8973-3126-4d03-bd80-02618c78d955\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-06ad8973-3126-4d03-bd80-02618c78d955\") on node \"crc\" DevicePath \"\""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.238595 4702 generic.go:334] "Generic (PLEG): container finished" podID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" containerID="393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8" exitCode=0
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.238635 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/rabbitmq-server-0"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.238650 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8","Type":"ContainerDied","Data":"393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8"}
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.238675 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/rabbitmq-server-0" event={"ID":"d83d39a8-b9f9-4526-ba09-9aa3848fe7b8","Type":"ContainerDied","Data":"21710379d2aed65f6759d3587d04d60a691b86dfda1bdb9ead5d656e63d2629f"}
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.238699 4702 scope.go:117] "RemoveContainer" containerID="393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.240375 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-j9plj"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.255786 4702 scope.go:117] "RemoveContainer" containerID="b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.288043 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-j9plj"]
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.289283 4702 scope.go:117] "RemoveContainer" containerID="393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8"
Nov 25 11:08:07 crc kubenswrapper[4702]: E1125 11:08:07.289685 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8\": container with ID starting with 393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8 not found: ID does not exist" containerID="393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.289738 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8"} err="failed to get container status \"393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8\": rpc error: code = NotFound desc = could not find container \"393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8\": container with ID starting with 393f6d8af07c2b59031a4cd357be182596bd27fd11c21d526bcdaf7d5f6b24a8 not found: ID does not exist"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.289768 4702 scope.go:117] "RemoveContainer" containerID="b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80"
Nov 25 11:08:07 crc kubenswrapper[4702]: E1125 11:08:07.290086 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80\": container with ID starting with b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80 not found: ID does not exist" containerID="b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.290117 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80"} err="failed to get container status \"b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80\": rpc error: code = NotFound desc = could not find container \"b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80\": container with ID starting with b050485b6900ce71bb3de0d03d86290cec0dc7bd4e03b3203e7a13ea79d91d80 not found: ID does not exist"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.292648 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-j9plj"]
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.302042 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"]
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.306806 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/rabbitmq-server-0"]
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.410926 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7c562a4b-ceef-41a2-aa46-a2962017eb2b" path="/var/lib/kubelet/pods/7c562a4b-ceef-41a2-aa46-a2962017eb2b/volumes"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.411759 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf" path="/var/lib/kubelet/pods/9253c0c1-fc1f-4505-bfcb-a2b001fe7ebf/volumes"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.412710 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" path="/var/lib/kubelet/pods/d83d39a8-b9f9-4526-ba09-9aa3848fe7b8/volumes"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.414026 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f106a32a-e625-4380-9d1c-683bbf9036bc" path="/var/lib/kubelet/pods/f106a32a-e625-4380-9d1c-683bbf9036bc/volumes"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.433218 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="keystone-kuttl-tests/openstack-galera-0" podUID="78dea2f0-701b-4124-8def-a3c353705d62" containerName="galera" containerID="cri-o://b12c1d8415b206ae55f9b0eef6e039d2d6e9349fe58bc1f24a1d752a43759216" gracePeriod=26
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.718133 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-1"
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.840206 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-n2sjh\" (UniqueName: \"kubernetes.io/projected/f3415a28-7b55-4649-94fd-9b976e6919d7-kube-api-access-n2sjh\") pod \"f3415a28-7b55-4649-94fd-9b976e6919d7\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.840331 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-kolla-config\") pod \"f3415a28-7b55-4649-94fd-9b976e6919d7\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.840435 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-operator-scripts\") pod \"f3415a28-7b55-4649-94fd-9b976e6919d7\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") "
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.841029 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "f3415a28-7b55-4649-94fd-9b976e6919d7" (UID: "f3415a28-7b55-4649-94fd-9b976e6919d7"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.841221 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f3415a28-7b55-4649-94fd-9b976e6919d7" (UID: "f3415a28-7b55-4649-94fd-9b976e6919d7"). InnerVolumeSpecName "operator-scripts".
PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.841321 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") pod \"f3415a28-7b55-4649-94fd-9b976e6919d7\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.841680 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-default\") pod \"f3415a28-7b55-4649-94fd-9b976e6919d7\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.841718 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-generated\") pod \"f3415a28-7b55-4649-94fd-9b976e6919d7\" (UID: \"f3415a28-7b55-4649-94fd-9b976e6919d7\") " Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.842124 4702 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.842143 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.842187 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "f3415a28-7b55-4649-94fd-9b976e6919d7" (UID: "f3415a28-7b55-4649-94fd-9b976e6919d7"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.842363 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "f3415a28-7b55-4649-94fd-9b976e6919d7" (UID: "f3415a28-7b55-4649-94fd-9b976e6919d7"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.845923 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3415a28-7b55-4649-94fd-9b976e6919d7-kube-api-access-n2sjh" (OuterVolumeSpecName: "kube-api-access-n2sjh") pod "f3415a28-7b55-4649-94fd-9b976e6919d7" (UID: "f3415a28-7b55-4649-94fd-9b976e6919d7"). InnerVolumeSpecName "kube-api-access-n2sjh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.850294 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage03-crc" (OuterVolumeSpecName: "mysql-db") pod "f3415a28-7b55-4649-94fd-9b976e6919d7" (UID: "f3415a28-7b55-4649-94fd-9b976e6919d7"). InnerVolumeSpecName "local-storage03-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.943611 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-n2sjh\" (UniqueName: \"kubernetes.io/projected/f3415a28-7b55-4649-94fd-9b976e6919d7-kube-api-access-n2sjh\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.943682 4702 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" " Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.943693 4702 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.943705 4702 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/f3415a28-7b55-4649-94fd-9b976e6919d7-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:07 crc kubenswrapper[4702]: I1125 11:08:07.957158 4702 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage03-crc" (UniqueName: "kubernetes.io/local-volume/local-storage03-crc") on node "crc" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.044931 4702 reconciler_common.go:293] "Volume detached for volume \"local-storage03-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage03-crc\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.227883 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj"] Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.228123 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerName="manager" containerID="cri-o://26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5" gracePeriod=10 Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.228195 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerName="kube-rbac-proxy" containerID="cri-o://e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017" gracePeriod=10 Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.249927 4702 generic.go:334] "Generic (PLEG): container finished" podID="f3415a28-7b55-4649-94fd-9b976e6919d7" containerID="5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970" exitCode=0 Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.250003 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"f3415a28-7b55-4649-94fd-9b976e6919d7","Type":"ContainerDied","Data":"5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970"} Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.250035 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-1" event={"ID":"f3415a28-7b55-4649-94fd-9b976e6919d7","Type":"ContainerDied","Data":"4444271ffdad6db0b45be0f5e5497ee89d779bced028479dcacb33fe5718327a"} Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.250055 4702 
scope.go:117] "RemoveContainer" containerID="5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.250178 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-1" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.262131 4702 generic.go:334] "Generic (PLEG): container finished" podID="78dea2f0-701b-4124-8def-a3c353705d62" containerID="b12c1d8415b206ae55f9b0eef6e039d2d6e9349fe58bc1f24a1d752a43759216" exitCode=0 Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.262244 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"78dea2f0-701b-4124-8def-a3c353705d62","Type":"ContainerDied","Data":"b12c1d8415b206ae55f9b0eef6e039d2d6e9349fe58bc1f24a1d752a43759216"} Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.262274 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="keystone-kuttl-tests/openstack-galera-0" event={"ID":"78dea2f0-701b-4124-8def-a3c353705d62","Type":"ContainerDied","Data":"be605fbc8731375b19d5f0d92ffd0d5141c80cbfad9c6ca61dd8ce54018ef636"} Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.262287 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="be605fbc8731375b19d5f0d92ffd0d5141c80cbfad9c6ca61dd8ce54018ef636" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.270996 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.294343 4702 scope.go:117] "RemoveContainer" containerID="d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.329995 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.333525 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/openstack-galera-1"] Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.349361 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-config-data-default\") pod \"78dea2f0-701b-4124-8def-a3c353705d62\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.349452 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") pod \"78dea2f0-701b-4124-8def-a3c353705d62\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.349481 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-kolla-config\") pod \"78dea2f0-701b-4124-8def-a3c353705d62\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.349505 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78dea2f0-701b-4124-8def-a3c353705d62-config-data-generated\") pod \"78dea2f0-701b-4124-8def-a3c353705d62\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 
11:08:08.349558 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6q7j\" (UniqueName: \"kubernetes.io/projected/78dea2f0-701b-4124-8def-a3c353705d62-kube-api-access-g6q7j\") pod \"78dea2f0-701b-4124-8def-a3c353705d62\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.349599 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-operator-scripts\") pod \"78dea2f0-701b-4124-8def-a3c353705d62\" (UID: \"78dea2f0-701b-4124-8def-a3c353705d62\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.351686 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "78dea2f0-701b-4124-8def-a3c353705d62" (UID: "78dea2f0-701b-4124-8def-a3c353705d62"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.352236 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "78dea2f0-701b-4124-8def-a3c353705d62" (UID: "78dea2f0-701b-4124-8def-a3c353705d62"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.352873 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "78dea2f0-701b-4124-8def-a3c353705d62" (UID: "78dea2f0-701b-4124-8def-a3c353705d62"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.355584 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/78dea2f0-701b-4124-8def-a3c353705d62-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "78dea2f0-701b-4124-8def-a3c353705d62" (UID: "78dea2f0-701b-4124-8def-a3c353705d62"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.358106 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/78dea2f0-701b-4124-8def-a3c353705d62-kube-api-access-g6q7j" (OuterVolumeSpecName: "kube-api-access-g6q7j") pod "78dea2f0-701b-4124-8def-a3c353705d62" (UID: "78dea2f0-701b-4124-8def-a3c353705d62"). InnerVolumeSpecName "kube-api-access-g6q7j". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.360507 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage06-crc" (OuterVolumeSpecName: "mysql-db") pod "78dea2f0-701b-4124-8def-a3c353705d62" (UID: "78dea2f0-701b-4124-8def-a3c353705d62"). InnerVolumeSpecName "local-storage06-crc". 
PluginName "kubernetes.io/local-volume", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.382472 4702 scope.go:117] "RemoveContainer" containerID="5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970" Nov 25 11:08:08 crc kubenswrapper[4702]: E1125 11:08:08.383922 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970\": container with ID starting with 5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970 not found: ID does not exist" containerID="5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.383968 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970"} err="failed to get container status \"5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970\": rpc error: code = NotFound desc = could not find container \"5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970\": container with ID starting with 5205e5cadbeb30240c1a8059f9a17285c92751b39e611321e657b06931f87970 not found: ID does not exist" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.384024 4702 scope.go:117] "RemoveContainer" containerID="d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a" Nov 25 11:08:08 crc kubenswrapper[4702]: E1125 11:08:08.384543 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a\": container with ID starting with d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a not found: ID does not exist" containerID="d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.384753 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a"} err="failed to get container status \"d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a\": rpc error: code = NotFound desc = could not find container \"d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a\": container with ID starting with d899fc771bfe0f88d2b868875aa6c204196769608f682a884ab56a31dc38f82a not found: ID does not exist" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.451562 4702 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.451620 4702 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-kolla-config\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.451634 4702 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/78dea2f0-701b-4124-8def-a3c353705d62-config-data-generated\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.451643 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6q7j\" (UniqueName: 
\"kubernetes.io/projected/78dea2f0-701b-4124-8def-a3c353705d62-kube-api-access-g6q7j\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.451652 4702 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-operator-scripts\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.451660 4702 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/78dea2f0-701b-4124-8def-a3c353705d62-config-data-default\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.467117 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-pm8nx"] Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.467367 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-pm8nx" podUID="0a31b7b4-f333-4334-8940-873e0a462d72" containerName="registry-server" containerID="cri-o://dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b" gracePeriod=30 Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.477218 4702 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage06-crc" (UniqueName: "kubernetes.io/local-volume/local-storage06-crc") on node "crc" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.497401 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"] Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.504353 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/5d473c3169f40b179d14921c90af2c8546b7b757fe551b7dba7d903f5dkhqql"] Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.552637 4702 reconciler_common.go:293] "Volume detached for volume \"local-storage06-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage06-crc\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.666571 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.754755 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-apiservice-cert\") pod \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.756113 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-webhook-cert\") pod \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.756195 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hbpkr\" (UniqueName: \"kubernetes.io/projected/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-kube-api-access-hbpkr\") pod \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\" (UID: \"a79a35ba-b1cc-4c3b-bf33-43ff1af46972\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.758722 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "a79a35ba-b1cc-4c3b-bf33-43ff1af46972" (UID: "a79a35ba-b1cc-4c3b-bf33-43ff1af46972"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.759418 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-kube-api-access-hbpkr" (OuterVolumeSpecName: "kube-api-access-hbpkr") pod "a79a35ba-b1cc-4c3b-bf33-43ff1af46972" (UID: "a79a35ba-b1cc-4c3b-bf33-43ff1af46972"). InnerVolumeSpecName "kube-api-access-hbpkr". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.759510 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "a79a35ba-b1cc-4c3b-bf33-43ff1af46972" (UID: "a79a35ba-b1cc-4c3b-bf33-43ff1af46972"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.835574 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-pm8nx" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.857700 4702 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.857734 4702 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.857744 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hbpkr\" (UniqueName: \"kubernetes.io/projected/a79a35ba-b1cc-4c3b-bf33-43ff1af46972-kube-api-access-hbpkr\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.958846 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22cz5\" (UniqueName: \"kubernetes.io/projected/0a31b7b4-f333-4334-8940-873e0a462d72-kube-api-access-22cz5\") pod \"0a31b7b4-f333-4334-8940-873e0a462d72\" (UID: \"0a31b7b4-f333-4334-8940-873e0a462d72\") " Nov 25 11:08:08 crc kubenswrapper[4702]: I1125 11:08:08.962972 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0a31b7b4-f333-4334-8940-873e0a462d72-kube-api-access-22cz5" (OuterVolumeSpecName: "kube-api-access-22cz5") pod "0a31b7b4-f333-4334-8940-873e0a462d72" (UID: "0a31b7b4-f333-4334-8940-873e0a462d72"). InnerVolumeSpecName "kube-api-access-22cz5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.060641 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22cz5\" (UniqueName: \"kubernetes.io/projected/0a31b7b4-f333-4334-8940-873e0a462d72-kube-api-access-22cz5\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.272459 4702 generic.go:334] "Generic (PLEG): container finished" podID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerID="e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017" exitCode=0 Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.272496 4702 generic.go:334] "Generic (PLEG): container finished" podID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerID="26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5" exitCode=0 Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.272512 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.272546 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" event={"ID":"a79a35ba-b1cc-4c3b-bf33-43ff1af46972","Type":"ContainerDied","Data":"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017"} Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.272570 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" event={"ID":"a79a35ba-b1cc-4c3b-bf33-43ff1af46972","Type":"ContainerDied","Data":"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5"} Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.272581 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj" event={"ID":"a79a35ba-b1cc-4c3b-bf33-43ff1af46972","Type":"ContainerDied","Data":"56333537dfe5e2df48c43392870b0dd22de86314f523847238b06b93f4756820"} Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.272596 4702 scope.go:117] "RemoveContainer" containerID="e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.275233 4702 generic.go:334] "Generic (PLEG): container finished" podID="0a31b7b4-f333-4334-8940-873e0a462d72" containerID="dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b" exitCode=0 Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.275304 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="keystone-kuttl-tests/openstack-galera-0" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.276252 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-pm8nx" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.277113 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-pm8nx" event={"ID":"0a31b7b4-f333-4334-8940-873e0a462d72","Type":"ContainerDied","Data":"dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b"} Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.277156 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-pm8nx" event={"ID":"0a31b7b4-f333-4334-8940-873e0a462d72","Type":"ContainerDied","Data":"9ccc9c90b436ce3ea98d76279dec629ace478efc9baf788f952aabd18f7665ca"} Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.291001 4702 scope.go:117] "RemoveContainer" containerID="26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.311062 4702 scope.go:117] "RemoveContainer" containerID="e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017" Nov 25 11:08:09 crc kubenswrapper[4702]: E1125 11:08:09.311530 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017\": container with ID starting with e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017 not found: ID does not exist" containerID="e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.311568 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017"} err="failed to get container status \"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017\": rpc error: code = NotFound desc = could not find container \"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017\": container with ID starting with e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017 not found: ID does not exist" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.311591 4702 scope.go:117] "RemoveContainer" containerID="26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5" Nov 25 11:08:09 crc kubenswrapper[4702]: E1125 11:08:09.311936 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5\": container with ID starting with 26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5 not found: ID does not exist" containerID="26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.311958 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5"} err="failed to get container status \"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5\": rpc error: code = NotFound desc = could not find container \"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5\": container with ID starting with 26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5 not found: ID does not exist" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.311974 4702 scope.go:117] "RemoveContainer" containerID="e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017" Nov 
25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.312168 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017"} err="failed to get container status \"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017\": rpc error: code = NotFound desc = could not find container \"e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017\": container with ID starting with e80e2edc95573d59505d9e17eb355cec19c87a42aca6128419d0cc3feeff5017 not found: ID does not exist" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.312186 4702 scope.go:117] "RemoveContainer" containerID="26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.312384 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5"} err="failed to get container status \"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5\": rpc error: code = NotFound desc = could not find container \"26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5\": container with ID starting with 26658123ce259ef1d20619de24102546cf68fb79f10ce7d75217b759dc867cc5 not found: ID does not exist" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.312404 4702 scope.go:117] "RemoveContainer" containerID="dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.312468 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj"] Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.321747 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-647cf9c56-wn9rj"] Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.327954 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-pm8nx"] Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.329289 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-pm8nx"] Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.366505 4702 scope.go:117] "RemoveContainer" containerID="dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b" Nov 25 11:08:09 crc kubenswrapper[4702]: E1125 11:08:09.366967 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b\": container with ID starting with dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b not found: ID does not exist" containerID="dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.366998 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b"} err="failed to get container status \"dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b\": rpc error: code = NotFound desc = could not find container \"dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b\": container with ID starting with dbd58923b8ecc32a913eda63423e5b2647376fea8d7a132ad7787f8b22c5e06b not found: ID does not exist" Nov 25 11:08:09 crc kubenswrapper[4702]: 
I1125 11:08:09.383616 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.389456 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["keystone-kuttl-tests/openstack-galera-0"] Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.411739 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0a31b7b4-f333-4334-8940-873e0a462d72" path="/var/lib/kubelet/pods/0a31b7b4-f333-4334-8940-873e0a462d72/volumes" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.412323 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="78dea2f0-701b-4124-8def-a3c353705d62" path="/var/lib/kubelet/pods/78dea2f0-701b-4124-8def-a3c353705d62/volumes" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.413108 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="81a8be8b-a635-4cf1-a6a9-8b77e4b55426" path="/var/lib/kubelet/pods/81a8be8b-a635-4cf1-a6a9-8b77e4b55426/volumes" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.414306 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" path="/var/lib/kubelet/pods/a79a35ba-b1cc-4c3b-bf33-43ff1af46972/volumes" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.414884 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3415a28-7b55-4649-94fd-9b976e6919d7" path="/var/lib/kubelet/pods/f3415a28-7b55-4649-94fd-9b976e6919d7/volumes" Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.991526 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"] Nov 25 11:08:09 crc kubenswrapper[4702]: I1125 11:08:09.991765 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" containerID="cri-o://141dec66156e2f971af2e8c4ceda67d71beeee7771b69b099aed85f101a0ecb6" gracePeriod=10 Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.269829 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-vdhbr"] Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.270314 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-vdhbr" podUID="57834186-3a16-48fa-a5b5-fc12a25825af" containerName="registry-server" containerID="cri-o://75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf" gracePeriod=30 Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.299677 4702 generic.go:334] "Generic (PLEG): container finished" podID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerID="141dec66156e2f971af2e8c4ceda67d71beeee7771b69b099aed85f101a0ecb6" exitCode=0 Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.300160 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerDied","Data":"141dec66156e2f971af2e8c4ceda67d71beeee7771b69b099aed85f101a0ecb6"} Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.300210 4702 scope.go:117] "RemoveContainer" containerID="af7b3848b73867c28f98b72fc812b052d46b8b69b2ade306b822c38dec790b28" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.337989 4702 kubelet.go:2437] "SyncLoop 
DELETE" source="api" pods=["openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw"] Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.342578 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/c976308faac62824ee875fa80dce4db57a79e32adb8a627dd31cdf72f65jwnw"] Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.448539 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.598611 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-apiservice-cert\") pod \"c7e80c7d-91e3-4953-bf91-d35441e38743\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.598678 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-webhook-cert\") pod \"c7e80c7d-91e3-4953-bf91-d35441e38743\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.598704 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xznc5\" (UniqueName: \"kubernetes.io/projected/c7e80c7d-91e3-4953-bf91-d35441e38743-kube-api-access-xznc5\") pod \"c7e80c7d-91e3-4953-bf91-d35441e38743\" (UID: \"c7e80c7d-91e3-4953-bf91-d35441e38743\") " Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.606139 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "c7e80c7d-91e3-4953-bf91-d35441e38743" (UID: "c7e80c7d-91e3-4953-bf91-d35441e38743"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.613132 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c7e80c7d-91e3-4953-bf91-d35441e38743-kube-api-access-xznc5" (OuterVolumeSpecName: "kube-api-access-xznc5") pod "c7e80c7d-91e3-4953-bf91-d35441e38743" (UID: "c7e80c7d-91e3-4953-bf91-d35441e38743"). InnerVolumeSpecName "kube-api-access-xznc5". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.617182 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "c7e80c7d-91e3-4953-bf91-d35441e38743" (UID: "c7e80c7d-91e3-4953-bf91-d35441e38743"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.700232 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xznc5\" (UniqueName: \"kubernetes.io/projected/c7e80c7d-91e3-4953-bf91-d35441e38743-kube-api-access-xznc5\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.700265 4702 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-apiservice-cert\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.700279 4702 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/c7e80c7d-91e3-4953-bf91-d35441e38743-webhook-cert\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.710506 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.800784 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hg6xn\" (UniqueName: \"kubernetes.io/projected/57834186-3a16-48fa-a5b5-fc12a25825af-kube-api-access-hg6xn\") pod \"57834186-3a16-48fa-a5b5-fc12a25825af\" (UID: \"57834186-3a16-48fa-a5b5-fc12a25825af\") " Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.803945 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57834186-3a16-48fa-a5b5-fc12a25825af-kube-api-access-hg6xn" (OuterVolumeSpecName: "kube-api-access-hg6xn") pod "57834186-3a16-48fa-a5b5-fc12a25825af" (UID: "57834186-3a16-48fa-a5b5-fc12a25825af"). InnerVolumeSpecName "kube-api-access-hg6xn". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:10 crc kubenswrapper[4702]: I1125 11:08:10.902454 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hg6xn\" (UniqueName: \"kubernetes.io/projected/57834186-3a16-48fa-a5b5-fc12a25825af-kube-api-access-hg6xn\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.308443 4702 generic.go:334] "Generic (PLEG): container finished" podID="57834186-3a16-48fa-a5b5-fc12a25825af" containerID="75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf" exitCode=0 Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.308875 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-index-vdhbr" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.311073 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-vdhbr" event={"ID":"57834186-3a16-48fa-a5b5-fc12a25825af","Type":"ContainerDied","Data":"75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf"} Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.311156 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-vdhbr" event={"ID":"57834186-3a16-48fa-a5b5-fc12a25825af","Type":"ContainerDied","Data":"b539766b96c30af7aabaff41faad48baa9e2b95ab4b72040e90b62617ec5e25c"} Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.311185 4702 scope.go:117] "RemoveContainer" containerID="75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.312680 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" event={"ID":"c7e80c7d-91e3-4953-bf91-d35441e38743","Type":"ContainerDied","Data":"b8030e119264ab27956a9219011c8bc132d1eddedfe9ba52f9303b1ee1f28bbb"} Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.312701 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.342400 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-vdhbr"] Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.347398 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-vdhbr"] Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.348284 4702 scope.go:117] "RemoveContainer" containerID="75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf" Nov 25 11:08:11 crc kubenswrapper[4702]: E1125 11:08:11.349038 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf\": container with ID starting with 75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf not found: ID does not exist" containerID="75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.349073 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf"} err="failed to get container status \"75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf\": rpc error: code = NotFound desc = could not find container \"75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf\": container with ID starting with 75ab923136c0ec54ce68ccb4aede9e8c10de2bf76c0d90f0cf1f4811de3703cf not found: ID does not exist" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.349098 4702 scope.go:117] "RemoveContainer" containerID="141dec66156e2f971af2e8c4ceda67d71beeee7771b69b099aed85f101a0ecb6" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.368469 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"] Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.370226 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["openstack-operators/mariadb-operator-controller-manager-656f99cfb7-jldk8"] Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.410608 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57834186-3a16-48fa-a5b5-fc12a25825af" path="/var/lib/kubelet/pods/57834186-3a16-48fa-a5b5-fc12a25825af/volumes" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.411208 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="91dec7ac-37c4-4a24-bd96-3fdcba2e5980" path="/var/lib/kubelet/pods/91dec7ac-37c4-4a24-bd96-3fdcba2e5980/volumes" Nov 25 11:08:11 crc kubenswrapper[4702]: I1125 11:08:11.412015 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" path="/var/lib/kubelet/pods/c7e80c7d-91e3-4953-bf91-d35441e38743/volumes" Nov 25 11:08:12 crc kubenswrapper[4702]: I1125 11:08:12.580764 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9"] Nov 25 11:08:12 crc kubenswrapper[4702]: I1125 11:08:12.581657 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" podUID="1599a060-4e8e-4a9f-a5af-ddb18e7c1e17" containerName="operator" containerID="cri-o://96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b" gracePeriod=10 Nov 25 11:08:12 crc kubenswrapper[4702]: I1125 11:08:12.893487 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-qpv8l"] Nov 25 11:08:12 crc kubenswrapper[4702]: I1125 11:08:12.893674 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" podUID="30210a41-8b19-4210-950f-5035af734552" containerName="registry-server" containerID="cri-o://67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1" gracePeriod=30 Nov 25 11:08:12 crc kubenswrapper[4702]: I1125 11:08:12.925962 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p"] Nov 25 11:08:12 crc kubenswrapper[4702]: I1125 11:08:12.934414 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e5909jw8p"] Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.037383 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.126839 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lppj9\" (UniqueName: \"kubernetes.io/projected/1599a060-4e8e-4a9f-a5af-ddb18e7c1e17-kube-api-access-lppj9\") pod \"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17\" (UID: \"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17\") " Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.146922 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1599a060-4e8e-4a9f-a5af-ddb18e7c1e17-kube-api-access-lppj9" (OuterVolumeSpecName: "kube-api-access-lppj9") pod "1599a060-4e8e-4a9f-a5af-ddb18e7c1e17" (UID: "1599a060-4e8e-4a9f-a5af-ddb18e7c1e17"). InnerVolumeSpecName "kube-api-access-lppj9". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.229098 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lppj9\" (UniqueName: \"kubernetes.io/projected/1599a060-4e8e-4a9f-a5af-ddb18e7c1e17-kube-api-access-lppj9\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.289009 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.331458 4702 generic.go:334] "Generic (PLEG): container finished" podID="1599a060-4e8e-4a9f-a5af-ddb18e7c1e17" containerID="96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b" exitCode=0 Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.331528 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" event={"ID":"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17","Type":"ContainerDied","Data":"96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b"} Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.331568 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.331806 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9" event={"ID":"1599a060-4e8e-4a9f-a5af-ddb18e7c1e17","Type":"ContainerDied","Data":"821d7818ef3681962bb2109e2283b4ae1425cc55c5d7734784f2bcccf1497b01"} Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.331830 4702 scope.go:117] "RemoveContainer" containerID="96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.333562 4702 generic.go:334] "Generic (PLEG): container finished" podID="30210a41-8b19-4210-950f-5035af734552" containerID="67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1" exitCode=0 Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.333583 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" event={"ID":"30210a41-8b19-4210-950f-5035af734552","Type":"ContainerDied","Data":"67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1"} Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.333606 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" event={"ID":"30210a41-8b19-4210-950f-5035af734552","Type":"ContainerDied","Data":"52bc1be2ebb6f61ff376ad3b2f72abb7fb78566efa9f11f999f8a55f05ebc741"} Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.333679 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-qpv8l" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.349375 4702 scope.go:117] "RemoveContainer" containerID="96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b" Nov 25 11:08:13 crc kubenswrapper[4702]: E1125 11:08:13.350064 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b\": container with ID starting with 96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b not found: ID does not exist" containerID="96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.350090 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b"} err="failed to get container status \"96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b\": rpc error: code = NotFound desc = could not find container \"96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b\": container with ID starting with 96367f5e40b6209ee0641e9775cfd2cd5803b5d154fbaee94abb7527b64e781b not found: ID does not exist" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.350109 4702 scope.go:117] "RemoveContainer" containerID="67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.365718 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9"] Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.370807 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hzqw9"] Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.371099 4702 scope.go:117] "RemoveContainer" containerID="67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1" Nov 25 11:08:13 crc kubenswrapper[4702]: E1125 11:08:13.371615 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1\": container with ID starting with 67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1 not found: ID does not exist" containerID="67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.371695 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1"} err="failed to get container status \"67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1\": rpc error: code = NotFound desc = could not find container \"67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1\": container with ID starting with 67a36a02bce1ded27736f1991261e68f579458c15b897ea6aab1c11edc2cb0f1 not found: ID does not exist" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.410569 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1599a060-4e8e-4a9f-a5af-ddb18e7c1e17" path="/var/lib/kubelet/pods/1599a060-4e8e-4a9f-a5af-ddb18e7c1e17/volumes" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.411263 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4ee32898-9ad6-4bca-9bd8-b12d7bb291a6" 
path="/var/lib/kubelet/pods/4ee32898-9ad6-4bca-9bd8-b12d7bb291a6/volumes" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.430764 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6f9wg\" (UniqueName: \"kubernetes.io/projected/30210a41-8b19-4210-950f-5035af734552-kube-api-access-6f9wg\") pod \"30210a41-8b19-4210-950f-5035af734552\" (UID: \"30210a41-8b19-4210-950f-5035af734552\") " Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.435750 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/30210a41-8b19-4210-950f-5035af734552-kube-api-access-6f9wg" (OuterVolumeSpecName: "kube-api-access-6f9wg") pod "30210a41-8b19-4210-950f-5035af734552" (UID: "30210a41-8b19-4210-950f-5035af734552"). InnerVolumeSpecName "kube-api-access-6f9wg". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.532261 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6f9wg\" (UniqueName: \"kubernetes.io/projected/30210a41-8b19-4210-950f-5035af734552-kube-api-access-6f9wg\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.662071 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-qpv8l"] Nov 25 11:08:13 crc kubenswrapper[4702]: I1125 11:08:13.665203 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-qpv8l"] Nov 25 11:08:15 crc kubenswrapper[4702]: I1125 11:08:15.412737 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="30210a41-8b19-4210-950f-5035af734552" path="/var/lib/kubelet/pods/30210a41-8b19-4210-950f-5035af734552/volumes" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.150994 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-98s8x/must-gather-fhkjq"] Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151728 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" containerName="setup-container" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151741 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" containerName="setup-container" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151752 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0a31b7b4-f333-4334-8940-873e0a462d72" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151758 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="0a31b7b4-f333-4334-8940-873e0a462d72" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151768 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dea2f0-701b-4124-8def-a3c353705d62" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151774 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dea2f0-701b-4124-8def-a3c353705d62" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151784 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f106a32a-e625-4380-9d1c-683bbf9036bc" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151789 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f106a32a-e625-4380-9d1c-683bbf9036bc" containerName="manager" Nov 25 11:08:27 crc 
kubenswrapper[4702]: E1125 11:08:27.151802 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1599a060-4e8e-4a9f-a5af-ddb18e7c1e17" containerName="operator" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151807 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="1599a060-4e8e-4a9f-a5af-ddb18e7c1e17" containerName="operator" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151815 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151822 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151828 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7c562a4b-ceef-41a2-aa46-a2962017eb2b" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151834 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="7c562a4b-ceef-41a2-aa46-a2962017eb2b" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151843 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151850 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151856 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3a68041-1390-4922-81b3-ca65322db681" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151861 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3a68041-1390-4922-81b3-ca65322db681" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151870 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151875 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151883 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="438d54fb-aec3-476a-ae67-1d906854d271" containerName="memcached" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151888 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="438d54fb-aec3-476a-ae67-1d906854d271" containerName="memcached" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151914 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151920 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151927 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" containerName="keystone-api" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151933 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" containerName="keystone-api" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151939 4702 cpu_manager.go:410] "RemoveStaleState: removing 
container" podUID="a3a68041-1390-4922-81b3-ca65322db681" containerName="mysql-bootstrap" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151945 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3a68041-1390-4922-81b3-ca65322db681" containerName="mysql-bootstrap" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151956 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="57834186-3a16-48fa-a5b5-fc12a25825af" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151962 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="57834186-3a16-48fa-a5b5-fc12a25825af" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151970 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerName="kube-rbac-proxy" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151976 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerName="kube-rbac-proxy" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.151989 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" containerName="rabbitmq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.151995 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" containerName="rabbitmq" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.152005 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3415a28-7b55-4649-94fd-9b976e6919d7" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152013 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3415a28-7b55-4649-94fd-9b976e6919d7" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.152025 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152031 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.152039 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3415a28-7b55-4649-94fd-9b976e6919d7" containerName="mysql-bootstrap" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152045 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3415a28-7b55-4649-94fd-9b976e6919d7" containerName="mysql-bootstrap" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.152052 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="78dea2f0-701b-4124-8def-a3c353705d62" containerName="mysql-bootstrap" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152057 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="78dea2f0-701b-4124-8def-a3c353705d62" containerName="mysql-bootstrap" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.152066 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="30210a41-8b19-4210-950f-5035af734552" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152073 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="30210a41-8b19-4210-950f-5035af734552" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.152083 4702 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="ef9ace14-2e75-4123-aef0-fc3fa0edd31e" containerName="mariadb-account-delete" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152090 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="ef9ace14-2e75-4123-aef0-fc3fa0edd31e" containerName="mariadb-account-delete" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152199 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f106a32a-e625-4380-9d1c-683bbf9036bc" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152215 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="438d54fb-aec3-476a-ae67-1d906854d271" containerName="memcached" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152225 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152234 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="57834186-3a16-48fa-a5b5-fc12a25825af" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152246 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="30210a41-8b19-4210-950f-5035af734552" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152254 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="d83d39a8-b9f9-4526-ba09-9aa3848fe7b8" containerName="rabbitmq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152262 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerName="kube-rbac-proxy" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152272 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152279 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a79a35ba-b1cc-4c3b-bf33-43ff1af46972" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152289 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152299 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="7bfbef7f-d1e8-48e6-936b-9b6bf68df4f5" containerName="keystone-api" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152307 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152316 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="ef9ace14-2e75-4123-aef0-fc3fa0edd31e" containerName="mariadb-account-delete" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152323 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3a68041-1390-4922-81b3-ca65322db681" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152332 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="7c562a4b-ceef-41a2-aa46-a2962017eb2b" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152342 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="1599a060-4e8e-4a9f-a5af-ddb18e7c1e17" containerName="operator" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152351 4702 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="f3415a28-7b55-4649-94fd-9b976e6919d7" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152358 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="0a31b7b4-f333-4334-8940-873e0a462d72" containerName="registry-server" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152367 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="78dea2f0-701b-4124-8def-a3c353705d62" containerName="galera" Nov 25 11:08:27 crc kubenswrapper[4702]: E1125 11:08:27.152488 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152500 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.152620 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="c7e80c7d-91e3-4953-bf91-d35441e38743" containerName="manager" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.153118 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.155370 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-98s8x"/"default-dockercfg-r68x6" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.156076 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-98s8x"/"openshift-service-ca.crt" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.156609 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-98s8x"/"kube-root-ca.crt" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.157501 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-98s8x/must-gather-fhkjq"] Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.202583 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5cn65\" (UniqueName: \"kubernetes.io/projected/f06992a6-c074-4f7c-a6db-ea83b8c806dc-kube-api-access-5cn65\") pod \"must-gather-fhkjq\" (UID: \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\") " pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.202666 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f06992a6-c074-4f7c-a6db-ea83b8c806dc-must-gather-output\") pod \"must-gather-fhkjq\" (UID: \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\") " pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.304218 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5cn65\" (UniqueName: \"kubernetes.io/projected/f06992a6-c074-4f7c-a6db-ea83b8c806dc-kube-api-access-5cn65\") pod \"must-gather-fhkjq\" (UID: \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\") " pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.304645 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f06992a6-c074-4f7c-a6db-ea83b8c806dc-must-gather-output\") pod \"must-gather-fhkjq\" (UID: 
\"f06992a6-c074-4f7c-a6db-ea83b8c806dc\") " pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.305116 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f06992a6-c074-4f7c-a6db-ea83b8c806dc-must-gather-output\") pod \"must-gather-fhkjq\" (UID: \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\") " pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.326817 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5cn65\" (UniqueName: \"kubernetes.io/projected/f06992a6-c074-4f7c-a6db-ea83b8c806dc-kube-api-access-5cn65\") pod \"must-gather-fhkjq\" (UID: \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\") " pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.472013 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.915393 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-98s8x/must-gather-fhkjq"] Nov 25 11:08:27 crc kubenswrapper[4702]: I1125 11:08:27.919665 4702 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 11:08:28 crc kubenswrapper[4702]: I1125 11:08:28.434916 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-98s8x/must-gather-fhkjq" event={"ID":"f06992a6-c074-4f7c-a6db-ea83b8c806dc","Type":"ContainerStarted","Data":"0cf498dc8c3e9b5bb284d0d3d86ab6baa7c86731b7d13f258e9190dc8fc599f8"} Nov 25 11:08:31 crc kubenswrapper[4702]: I1125 11:08:31.463432 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-98s8x/must-gather-fhkjq" event={"ID":"f06992a6-c074-4f7c-a6db-ea83b8c806dc","Type":"ContainerStarted","Data":"227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889"} Nov 25 11:08:32 crc kubenswrapper[4702]: I1125 11:08:32.470942 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-98s8x/must-gather-fhkjq" event={"ID":"f06992a6-c074-4f7c-a6db-ea83b8c806dc","Type":"ContainerStarted","Data":"289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff"} Nov 25 11:08:32 crc kubenswrapper[4702]: I1125 11:08:32.486914 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-98s8x/must-gather-fhkjq" podStartSLOduration=2.319141172 podStartE2EDuration="5.486863513s" podCreationTimestamp="2025-11-25 11:08:27 +0000 UTC" firstStartedPulling="2025-11-25 11:08:27.919439024 +0000 UTC m=+2205.286034713" lastFinishedPulling="2025-11-25 11:08:31.087161365 +0000 UTC m=+2208.453757054" observedRunningTime="2025-11-25 11:08:32.485925046 +0000 UTC m=+2209.852520755" watchObservedRunningTime="2025-11-25 11:08:32.486863513 +0000 UTC m=+2209.853459222" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.400670 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-cfnlx"] Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.401991 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.410217 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cfnlx"] Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.509763 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-utilities\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.509851 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-catalog-content\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.509876 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bmgcz\" (UniqueName: \"kubernetes.io/projected/99f80611-2550-427d-8c3c-2b70325a83f9-kube-api-access-bmgcz\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.610747 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-utilities\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.610818 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-catalog-content\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.610839 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bmgcz\" (UniqueName: \"kubernetes.io/projected/99f80611-2550-427d-8c3c-2b70325a83f9-kube-api-access-bmgcz\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.611496 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-utilities\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.611707 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-catalog-content\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.635624 4702 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"kube-api-access-bmgcz\" (UniqueName: \"kubernetes.io/projected/99f80611-2550-427d-8c3c-2b70325a83f9-kube-api-access-bmgcz\") pod \"community-operators-cfnlx\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:35 crc kubenswrapper[4702]: I1125 11:08:35.734611 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:36 crc kubenswrapper[4702]: I1125 11:08:36.018869 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-cfnlx"] Nov 25 11:08:36 crc kubenswrapper[4702]: W1125 11:08:36.023882 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod99f80611_2550_427d_8c3c_2b70325a83f9.slice/crio-f792131a3cd425a347a9c271641ed7d066c43c2b6e914999039c45241fbe1b04 WatchSource:0}: Error finding container f792131a3cd425a347a9c271641ed7d066c43c2b6e914999039c45241fbe1b04: Status 404 returned error can't find the container with id f792131a3cd425a347a9c271641ed7d066c43c2b6e914999039c45241fbe1b04 Nov 25 11:08:36 crc kubenswrapper[4702]: I1125 11:08:36.494563 4702 generic.go:334] "Generic (PLEG): container finished" podID="99f80611-2550-427d-8c3c-2b70325a83f9" containerID="fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa" exitCode=0 Nov 25 11:08:36 crc kubenswrapper[4702]: I1125 11:08:36.494613 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfnlx" event={"ID":"99f80611-2550-427d-8c3c-2b70325a83f9","Type":"ContainerDied","Data":"fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa"} Nov 25 11:08:36 crc kubenswrapper[4702]: I1125 11:08:36.494641 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfnlx" event={"ID":"99f80611-2550-427d-8c3c-2b70325a83f9","Type":"ContainerStarted","Data":"f792131a3cd425a347a9c271641ed7d066c43c2b6e914999039c45241fbe1b04"} Nov 25 11:08:37 crc kubenswrapper[4702]: I1125 11:08:37.515754 4702 generic.go:334] "Generic (PLEG): container finished" podID="99f80611-2550-427d-8c3c-2b70325a83f9" containerID="f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a" exitCode=0 Nov 25 11:08:37 crc kubenswrapper[4702]: I1125 11:08:37.515839 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfnlx" event={"ID":"99f80611-2550-427d-8c3c-2b70325a83f9","Type":"ContainerDied","Data":"f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a"} Nov 25 11:08:38 crc kubenswrapper[4702]: I1125 11:08:38.525981 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfnlx" event={"ID":"99f80611-2550-427d-8c3c-2b70325a83f9","Type":"ContainerStarted","Data":"cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9"} Nov 25 11:08:45 crc kubenswrapper[4702]: I1125 11:08:45.735065 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:45 crc kubenswrapper[4702]: I1125 11:08:45.735686 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:45 crc kubenswrapper[4702]: I1125 11:08:45.777123 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:45 crc kubenswrapper[4702]: I1125 11:08:45.795802 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-cfnlx" podStartSLOduration=9.330326292 podStartE2EDuration="10.795786729s" podCreationTimestamp="2025-11-25 11:08:35 +0000 UTC" firstStartedPulling="2025-11-25 11:08:36.495803573 +0000 UTC m=+2213.862399272" lastFinishedPulling="2025-11-25 11:08:37.96126402 +0000 UTC m=+2215.327859709" observedRunningTime="2025-11-25 11:08:38.545966904 +0000 UTC m=+2215.912562603" watchObservedRunningTime="2025-11-25 11:08:45.795786729 +0000 UTC m=+2223.162382418" Nov 25 11:08:46 crc kubenswrapper[4702]: I1125 11:08:46.616637 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:46 crc kubenswrapper[4702]: I1125 11:08:46.658856 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cfnlx"] Nov 25 11:08:48 crc kubenswrapper[4702]: I1125 11:08:48.587624 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-cfnlx" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" containerName="registry-server" containerID="cri-o://cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9" gracePeriod=2 Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.261526 4702 scope.go:117] "RemoveContainer" containerID="161c5c26cba12480bc73ce12a341f3c734f9ce20a4a19f289b6215e135a39fbb" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.282819 4702 scope.go:117] "RemoveContainer" containerID="4405ae38d3adb909c25d5148edb64cf6bce1466016b42e53282f1b1d188b9ca8" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.298255 4702 scope.go:117] "RemoveContainer" containerID="681bfa4695b0ef11796f8b067be07d8e3ff16c7aed72e61510586c3eb3631130" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.318865 4702 scope.go:117] "RemoveContainer" containerID="1234e7607711fa560e795101d30d28592177f15de2f41adefec2d09afce863a3" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.363518 4702 scope.go:117] "RemoveContainer" containerID="73d5e34f3c755fe0cae9dbe43a00fd1a0393ff2c472bbc236f378ba6173b1d7a" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.383058 4702 scope.go:117] "RemoveContainer" containerID="15bf19a3beded83cb15b577f6b4c40c036ef0778894b3eb712fb9eba47ce529b" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.414227 4702 scope.go:117] "RemoveContainer" containerID="dbeaddab60fbe6ea747939d3bac827c3af7a2019ecc90edf27e6752bd86558bb" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.445132 4702 scope.go:117] "RemoveContainer" containerID="53feb5236c141b53846c60a8d6997e271738abe1b58cb46391271de7086b5e8c" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.466562 4702 scope.go:117] "RemoveContainer" containerID="b12c1d8415b206ae55f9b0eef6e039d2d6e9349fe58bc1f24a1d752a43759216" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.489341 4702 scope.go:117] "RemoveContainer" containerID="1bb300c1d91a75fe78b93d9cbf105112d102e1e923a6f8d8190b3d5cd1ee66fa" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.519293 4702 scope.go:117] "RemoveContainer" containerID="ea8e88f760c62548da8284edc88970a6fb35142659b77aa01649850b0373e80f" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.526328 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.547463 4702 scope.go:117] "RemoveContainer" containerID="e3e6b965d770280718b1f141caec6f653cbaf915b00ffa7fa3ac958db9c6af55" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.570563 4702 scope.go:117] "RemoveContainer" containerID="8de91ee7f27bf7330c8b9346b8ab5aa93d77de3bf601ffd7c126715e0adfa1fa" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.591960 4702 scope.go:117] "RemoveContainer" containerID="0b0a58def35a854f026ccf555c0f0542874c3073569f6a6ac85a710fd19c8db6" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.601156 4702 generic.go:334] "Generic (PLEG): container finished" podID="99f80611-2550-427d-8c3c-2b70325a83f9" containerID="cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9" exitCode=0 Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.601211 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfnlx" event={"ID":"99f80611-2550-427d-8c3c-2b70325a83f9","Type":"ContainerDied","Data":"cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9"} Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.601239 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-cfnlx" event={"ID":"99f80611-2550-427d-8c3c-2b70325a83f9","Type":"ContainerDied","Data":"f792131a3cd425a347a9c271641ed7d066c43c2b6e914999039c45241fbe1b04"} Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.601255 4702 scope.go:117] "RemoveContainer" containerID="cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.601341 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-cfnlx" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.610159 4702 scope.go:117] "RemoveContainer" containerID="9e3933617a4a19f8c19bc8c3eb6325b38ff22343c5a08d2f2ad2515a7065534e" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.620925 4702 scope.go:117] "RemoveContainer" containerID="f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.639740 4702 scope.go:117] "RemoveContainer" containerID="d962d7e00791a4f71fd52183116aa7ac77c107948516c26bfda67efb406f5f0a" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.646717 4702 scope.go:117] "RemoveContainer" containerID="fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.675301 4702 scope.go:117] "RemoveContainer" containerID="cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9" Nov 25 11:08:49 crc kubenswrapper[4702]: E1125 11:08:49.676053 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9\": container with ID starting with cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9 not found: ID does not exist" containerID="cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.676084 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9"} err="failed to get container status \"cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9\": rpc error: code = NotFound desc = could not find container \"cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9\": container with ID starting with cdf1f7864cab83372aecd835858f1b4838c19c1db4d9fdd9647113eb3df4c4a9 not found: ID does not exist" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.676107 4702 scope.go:117] "RemoveContainer" containerID="f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a" Nov 25 11:08:49 crc kubenswrapper[4702]: E1125 11:08:49.676387 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a\": container with ID starting with f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a not found: ID does not exist" containerID="f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.676424 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a"} err="failed to get container status \"f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a\": rpc error: code = NotFound desc = could not find container \"f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a\": container with ID starting with f418dc6446499fe4ee016877285a72d8d3ee3562850d35e34f4ebf925cdd264a not found: ID does not exist" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.676444 4702 scope.go:117] "RemoveContainer" containerID="fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa" Nov 25 11:08:49 crc kubenswrapper[4702]: E1125 11:08:49.676764 4702 log.go:32] "ContainerStatus from 
runtime service failed" err="rpc error: code = NotFound desc = could not find container \"fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa\": container with ID starting with fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa not found: ID does not exist" containerID="fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.676791 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa"} err="failed to get container status \"fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa\": rpc error: code = NotFound desc = could not find container \"fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa\": container with ID starting with fb1d727a3fb633640ba80ae5d95e9cab9d7239d42ea1a2a3dbc66ed618d899aa not found: ID does not exist" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.686428 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bmgcz\" (UniqueName: \"kubernetes.io/projected/99f80611-2550-427d-8c3c-2b70325a83f9-kube-api-access-bmgcz\") pod \"99f80611-2550-427d-8c3c-2b70325a83f9\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.686518 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-catalog-content\") pod \"99f80611-2550-427d-8c3c-2b70325a83f9\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.686577 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-utilities\") pod \"99f80611-2550-427d-8c3c-2b70325a83f9\" (UID: \"99f80611-2550-427d-8c3c-2b70325a83f9\") " Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.687605 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-utilities" (OuterVolumeSpecName: "utilities") pod "99f80611-2550-427d-8c3c-2b70325a83f9" (UID: "99f80611-2550-427d-8c3c-2b70325a83f9"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.696237 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/99f80611-2550-427d-8c3c-2b70325a83f9-kube-api-access-bmgcz" (OuterVolumeSpecName: "kube-api-access-bmgcz") pod "99f80611-2550-427d-8c3c-2b70325a83f9" (UID: "99f80611-2550-427d-8c3c-2b70325a83f9"). InnerVolumeSpecName "kube-api-access-bmgcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.739606 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "99f80611-2550-427d-8c3c-2b70325a83f9" (UID: "99f80611-2550-427d-8c3c-2b70325a83f9"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.788077 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.788114 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bmgcz\" (UniqueName: \"kubernetes.io/projected/99f80611-2550-427d-8c3c-2b70325a83f9-kube-api-access-bmgcz\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.788128 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/99f80611-2550-427d-8c3c-2b70325a83f9-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.933854 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-cfnlx"] Nov 25 11:08:49 crc kubenswrapper[4702]: I1125 11:08:49.939269 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-cfnlx"] Nov 25 11:08:51 crc kubenswrapper[4702]: I1125 11:08:51.410877 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" path="/var/lib/kubelet/pods/99f80611-2550-427d-8c3c-2b70325a83f9/volumes" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.771624 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-nm6bn"] Nov 25 11:09:08 crc kubenswrapper[4702]: E1125 11:09:08.772482 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" containerName="extract-utilities" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.772498 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" containerName="extract-utilities" Nov 25 11:09:08 crc kubenswrapper[4702]: E1125 11:09:08.772526 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" containerName="registry-server" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.772535 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" containerName="registry-server" Nov 25 11:09:08 crc kubenswrapper[4702]: E1125 11:09:08.772547 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" containerName="extract-content" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.772554 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" containerName="extract-content" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.772674 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="99f80611-2550-427d-8c3c-2b70325a83f9" containerName="registry-server" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.780579 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.821429 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nm6bn"] Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.830836 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vswkh\" (UniqueName: \"kubernetes.io/projected/758610cf-c4d5-4c26-a429-c9882deb5a37-kube-api-access-vswkh\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.830889 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-catalog-content\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.830928 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-utilities\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.932321 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vswkh\" (UniqueName: \"kubernetes.io/projected/758610cf-c4d5-4c26-a429-c9882deb5a37-kube-api-access-vswkh\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.932380 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-catalog-content\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.932418 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-utilities\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.933072 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-utilities\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.933550 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-catalog-content\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:08 crc kubenswrapper[4702]: I1125 11:09:08.953033 4702 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-vswkh\" (UniqueName: \"kubernetes.io/projected/758610cf-c4d5-4c26-a429-c9882deb5a37-kube-api-access-vswkh\") pod \"redhat-marketplace-nm6bn\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:09 crc kubenswrapper[4702]: I1125 11:09:09.119330 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:09 crc kubenswrapper[4702]: I1125 11:09:09.363332 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-nm6bn"] Nov 25 11:09:09 crc kubenswrapper[4702]: I1125 11:09:09.724494 4702 generic.go:334] "Generic (PLEG): container finished" podID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerID="3aef09317ba12b09ce75a3d41dc4d0ee9e9eee4fdbaede2c9b69aa7996be2add" exitCode=0 Nov 25 11:09:09 crc kubenswrapper[4702]: I1125 11:09:09.724833 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nm6bn" event={"ID":"758610cf-c4d5-4c26-a429-c9882deb5a37","Type":"ContainerDied","Data":"3aef09317ba12b09ce75a3d41dc4d0ee9e9eee4fdbaede2c9b69aa7996be2add"} Nov 25 11:09:09 crc kubenswrapper[4702]: I1125 11:09:09.725051 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nm6bn" event={"ID":"758610cf-c4d5-4c26-a429-c9882deb5a37","Type":"ContainerStarted","Data":"5dab21a8f6e514d93b2cd4f0a314aa68275ed7b17c85857ee5b23c470c402f19"} Nov 25 11:09:10 crc kubenswrapper[4702]: I1125 11:09:10.732977 4702 generic.go:334] "Generic (PLEG): container finished" podID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerID="28fdabb1545c5d623fab333e5a4998b056f35075f9bfe53520c25da15b1c6c2f" exitCode=0 Nov 25 11:09:10 crc kubenswrapper[4702]: I1125 11:09:10.733047 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nm6bn" event={"ID":"758610cf-c4d5-4c26-a429-c9882deb5a37","Type":"ContainerDied","Data":"28fdabb1545c5d623fab333e5a4998b056f35075f9bfe53520c25da15b1c6c2f"} Nov 25 11:09:11 crc kubenswrapper[4702]: I1125 11:09:11.740581 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nm6bn" event={"ID":"758610cf-c4d5-4c26-a429-c9882deb5a37","Type":"ContainerStarted","Data":"e9baac893d50b38006e99c54b090058a8752f11502e391b6d3db7e5cf6340c7f"} Nov 25 11:09:11 crc kubenswrapper[4702]: I1125 11:09:11.761520 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-nm6bn" podStartSLOduration=2.316558518 podStartE2EDuration="3.761502943s" podCreationTimestamp="2025-11-25 11:09:08 +0000 UTC" firstStartedPulling="2025-11-25 11:09:09.727279822 +0000 UTC m=+2247.093875511" lastFinishedPulling="2025-11-25 11:09:11.172224227 +0000 UTC m=+2248.538819936" observedRunningTime="2025-11-25 11:09:11.760642688 +0000 UTC m=+2249.127238377" watchObservedRunningTime="2025-11-25 11:09:11.761502943 +0000 UTC m=+2249.128098642" Nov 25 11:09:12 crc kubenswrapper[4702]: I1125 11:09:12.831293 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-4jnl2_853aac53-23c5-4f78-a291-bc82dff9e338/control-plane-machine-set-operator/0.log" Nov 25 11:09:12 crc kubenswrapper[4702]: I1125 11:09:12.992751 4702 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7s7k2_af02ba00-7c73-4bc3-a341-5dac59a49e12/kube-rbac-proxy/0.log" Nov 25 11:09:13 crc kubenswrapper[4702]: I1125 11:09:13.042947 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7s7k2_af02ba00-7c73-4bc3-a341-5dac59a49e12/machine-api-operator/0.log" Nov 25 11:09:13 crc kubenswrapper[4702]: I1125 11:09:13.591140 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 11:09:13 crc kubenswrapper[4702]: I1125 11:09:13.591207 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 11:09:19 crc kubenswrapper[4702]: I1125 11:09:19.120075 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:19 crc kubenswrapper[4702]: I1125 11:09:19.120724 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:19 crc kubenswrapper[4702]: I1125 11:09:19.177294 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:19 crc kubenswrapper[4702]: I1125 11:09:19.824505 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:19 crc kubenswrapper[4702]: I1125 11:09:19.861592 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nm6bn"] Nov 25 11:09:21 crc kubenswrapper[4702]: I1125 11:09:21.793161 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-nm6bn" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerName="registry-server" containerID="cri-o://e9baac893d50b38006e99c54b090058a8752f11502e391b6d3db7e5cf6340c7f" gracePeriod=2 Nov 25 11:09:22 crc kubenswrapper[4702]: I1125 11:09:22.810137 4702 generic.go:334] "Generic (PLEG): container finished" podID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerID="e9baac893d50b38006e99c54b090058a8752f11502e391b6d3db7e5cf6340c7f" exitCode=0 Nov 25 11:09:22 crc kubenswrapper[4702]: I1125 11:09:22.810212 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nm6bn" event={"ID":"758610cf-c4d5-4c26-a429-c9882deb5a37","Type":"ContainerDied","Data":"e9baac893d50b38006e99c54b090058a8752f11502e391b6d3db7e5cf6340c7f"} Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.085509 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.280544 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vswkh\" (UniqueName: \"kubernetes.io/projected/758610cf-c4d5-4c26-a429-c9882deb5a37-kube-api-access-vswkh\") pod \"758610cf-c4d5-4c26-a429-c9882deb5a37\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.280649 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-catalog-content\") pod \"758610cf-c4d5-4c26-a429-c9882deb5a37\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.280705 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-utilities\") pod \"758610cf-c4d5-4c26-a429-c9882deb5a37\" (UID: \"758610cf-c4d5-4c26-a429-c9882deb5a37\") " Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.281441 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-utilities" (OuterVolumeSpecName: "utilities") pod "758610cf-c4d5-4c26-a429-c9882deb5a37" (UID: "758610cf-c4d5-4c26-a429-c9882deb5a37"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.281816 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.288057 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/758610cf-c4d5-4c26-a429-c9882deb5a37-kube-api-access-vswkh" (OuterVolumeSpecName: "kube-api-access-vswkh") pod "758610cf-c4d5-4c26-a429-c9882deb5a37" (UID: "758610cf-c4d5-4c26-a429-c9882deb5a37"). InnerVolumeSpecName "kube-api-access-vswkh". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.299987 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "758610cf-c4d5-4c26-a429-c9882deb5a37" (UID: "758610cf-c4d5-4c26-a429-c9882deb5a37"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.326275 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-sxkc9"] Nov 25 11:09:23 crc kubenswrapper[4702]: E1125 11:09:23.326604 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerName="extract-content" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.326620 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerName="extract-content" Nov 25 11:09:23 crc kubenswrapper[4702]: E1125 11:09:23.326643 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerName="registry-server" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.326650 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerName="registry-server" Nov 25 11:09:23 crc kubenswrapper[4702]: E1125 11:09:23.326659 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerName="extract-utilities" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.326667 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerName="extract-utilities" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.326803 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" containerName="registry-server" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.327687 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.337447 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sxkc9"] Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.382675 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-catalog-content\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.382762 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-utilities\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.382790 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6thd7\" (UniqueName: \"kubernetes.io/projected/8ab5674b-fca7-4c29-a7cf-5f11285f1235-kube-api-access-6thd7\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.382857 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/758610cf-c4d5-4c26-a429-c9882deb5a37-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 
11:09:23.382869 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vswkh\" (UniqueName: \"kubernetes.io/projected/758610cf-c4d5-4c26-a429-c9882deb5a37-kube-api-access-vswkh\") on node \"crc\" DevicePath \"\"" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.484248 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-utilities\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.484333 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6thd7\" (UniqueName: \"kubernetes.io/projected/8ab5674b-fca7-4c29-a7cf-5f11285f1235-kube-api-access-6thd7\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.484422 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-catalog-content\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.484785 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-utilities\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.484867 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-catalog-content\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.499931 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6thd7\" (UniqueName: \"kubernetes.io/projected/8ab5674b-fca7-4c29-a7cf-5f11285f1235-kube-api-access-6thd7\") pod \"certified-operators-sxkc9\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:23 crc kubenswrapper[4702]: I1125 11:09:23.660170 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:23.826419 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-nm6bn" event={"ID":"758610cf-c4d5-4c26-a429-c9882deb5a37","Type":"ContainerDied","Data":"5dab21a8f6e514d93b2cd4f0a314aa68275ed7b17c85857ee5b23c470c402f19"} Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:23.826472 4702 scope.go:117] "RemoveContainer" containerID="e9baac893d50b38006e99c54b090058a8752f11502e391b6d3db7e5cf6340c7f" Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:23.826638 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-nm6bn" Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:23.853174 4702 scope.go:117] "RemoveContainer" containerID="28fdabb1545c5d623fab333e5a4998b056f35075f9bfe53520c25da15b1c6c2f" Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:23.877799 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-nm6bn"] Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:23.895126 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-nm6bn"] Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:23.976776 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-sxkc9"] Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:23.977088 4702 scope.go:117] "RemoveContainer" containerID="3aef09317ba12b09ce75a3d41dc4d0ee9e9eee4fdbaede2c9b69aa7996be2add" Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:24.837088 4702 generic.go:334] "Generic (PLEG): container finished" podID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerID="dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a" exitCode=0 Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:24.837300 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxkc9" event={"ID":"8ab5674b-fca7-4c29-a7cf-5f11285f1235","Type":"ContainerDied","Data":"dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a"} Nov 25 11:09:24 crc kubenswrapper[4702]: I1125 11:09:24.837506 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxkc9" event={"ID":"8ab5674b-fca7-4c29-a7cf-5f11285f1235","Type":"ContainerStarted","Data":"7f6ea908f1941ee4cc4c173903c6030a596ae58f2934a19b699179afa949b832"} Nov 25 11:09:25 crc kubenswrapper[4702]: I1125 11:09:25.408816 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="758610cf-c4d5-4c26-a429-c9882deb5a37" path="/var/lib/kubelet/pods/758610cf-c4d5-4c26-a429-c9882deb5a37/volumes" Nov 25 11:09:27 crc kubenswrapper[4702]: I1125 11:09:27.854176 4702 generic.go:334] "Generic (PLEG): container finished" podID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerID="86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b" exitCode=0 Nov 25 11:09:27 crc kubenswrapper[4702]: I1125 11:09:27.854242 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxkc9" event={"ID":"8ab5674b-fca7-4c29-a7cf-5f11285f1235","Type":"ContainerDied","Data":"86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b"} Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.049341 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-2ck8z_973ff76e-6e14-4f6e-a0cb-fc3e5af9b694/kube-rbac-proxy/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.091042 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-2ck8z_973ff76e-6e14-4f6e-a0cb-fc3e5af9b694/controller/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.224757 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-frr-files/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.374618 4702 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-reloader/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.419046 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-frr-files/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.426749 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-reloader/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.471005 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-metrics/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.604307 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-frr-files/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.634330 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-reloader/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.645198 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-metrics/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.706711 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-metrics/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.860050 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/controller/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.862239 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxkc9" event={"ID":"8ab5674b-fca7-4c29-a7cf-5f11285f1235","Type":"ContainerStarted","Data":"b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae"} Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.883384 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-sxkc9" podStartSLOduration=2.372552703 podStartE2EDuration="5.883368153s" podCreationTimestamp="2025-11-25 11:09:23 +0000 UTC" firstStartedPulling="2025-11-25 11:09:24.84016964 +0000 UTC m=+2262.206765329" lastFinishedPulling="2025-11-25 11:09:28.35098509 +0000 UTC m=+2265.717580779" observedRunningTime="2025-11-25 11:09:28.880304315 +0000 UTC m=+2266.246900014" watchObservedRunningTime="2025-11-25 11:09:28.883368153 +0000 UTC m=+2266.249963842" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.885302 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-metrics/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.893707 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-frr-files/0.log" Nov 25 11:09:28 crc kubenswrapper[4702]: I1125 11:09:28.896595 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-reloader/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.040892 4702 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/frr-metrics/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.076463 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/kube-rbac-proxy/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.116226 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/kube-rbac-proxy-frr/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.318120 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/reloader/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.426258 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-z5pzw_eea53047-9f4a-400c-8db5-bcb0a8c08967/frr-k8s-webhook-server/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.473182 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/frr/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.507329 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-d9d597dc8-q6ll4_a6507463-185a-40db-9736-bfcc4f0928e9/manager/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.700150 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5c9db6d78-h6mmv_f587e69f-3aaf-403e-a060-bf4542e19ec8/webhook-server/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.705533 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-m5tjs_92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf/kube-rbac-proxy/0.log" Nov 25 11:09:29 crc kubenswrapper[4702]: I1125 11:09:29.948446 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-m5tjs_92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf/speaker/0.log" Nov 25 11:09:33 crc kubenswrapper[4702]: I1125 11:09:33.660488 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:33 crc kubenswrapper[4702]: I1125 11:09:33.660552 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:33 crc kubenswrapper[4702]: I1125 11:09:33.701296 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:33 crc kubenswrapper[4702]: I1125 11:09:33.928731 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:33 crc kubenswrapper[4702]: I1125 11:09:33.976941 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sxkc9"] Nov 25 11:09:35 crc kubenswrapper[4702]: I1125 11:09:35.902315 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-sxkc9" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerName="registry-server" containerID="cri-o://b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae" gracePeriod=2 Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.760653 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.909130 4702 generic.go:334] "Generic (PLEG): container finished" podID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerID="b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae" exitCode=0 Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.909175 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxkc9" event={"ID":"8ab5674b-fca7-4c29-a7cf-5f11285f1235","Type":"ContainerDied","Data":"b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae"} Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.909205 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-sxkc9" event={"ID":"8ab5674b-fca7-4c29-a7cf-5f11285f1235","Type":"ContainerDied","Data":"7f6ea908f1941ee4cc4c173903c6030a596ae58f2934a19b699179afa949b832"} Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.909225 4702 scope.go:117] "RemoveContainer" containerID="b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.909364 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-sxkc9" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.926086 4702 scope.go:117] "RemoveContainer" containerID="86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.944526 4702 scope.go:117] "RemoveContainer" containerID="dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.955521 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-utilities\") pod \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.955565 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-catalog-content\") pod \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.955607 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6thd7\" (UniqueName: \"kubernetes.io/projected/8ab5674b-fca7-4c29-a7cf-5f11285f1235-kube-api-access-6thd7\") pod \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\" (UID: \"8ab5674b-fca7-4c29-a7cf-5f11285f1235\") " Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.956635 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-utilities" (OuterVolumeSpecName: "utilities") pod "8ab5674b-fca7-4c29-a7cf-5f11285f1235" (UID: "8ab5674b-fca7-4c29-a7cf-5f11285f1235"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.961530 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ab5674b-fca7-4c29-a7cf-5f11285f1235-kube-api-access-6thd7" (OuterVolumeSpecName: "kube-api-access-6thd7") pod "8ab5674b-fca7-4c29-a7cf-5f11285f1235" (UID: "8ab5674b-fca7-4c29-a7cf-5f11285f1235"). InnerVolumeSpecName "kube-api-access-6thd7". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.965355 4702 scope.go:117] "RemoveContainer" containerID="b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae" Nov 25 11:09:36 crc kubenswrapper[4702]: E1125 11:09:36.965751 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae\": container with ID starting with b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae not found: ID does not exist" containerID="b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.965781 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae"} err="failed to get container status \"b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae\": rpc error: code = NotFound desc = could not find container \"b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae\": container with ID starting with b3f6baba46b1045a074461ff04906b30395173cd0d634dbb8ccdae187488bfae not found: ID does not exist" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.965801 4702 scope.go:117] "RemoveContainer" containerID="86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b" Nov 25 11:09:36 crc kubenswrapper[4702]: E1125 11:09:36.966109 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b\": container with ID starting with 86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b not found: ID does not exist" containerID="86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.966129 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b"} err="failed to get container status \"86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b\": rpc error: code = NotFound desc = could not find container \"86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b\": container with ID starting with 86599a8169d422aaeda09853559b71459f83d1b50cc4915faab9c69fe911684b not found: ID does not exist" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.966141 4702 scope.go:117] "RemoveContainer" containerID="dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a" Nov 25 11:09:36 crc kubenswrapper[4702]: E1125 11:09:36.966341 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a\": container with ID starting with dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a not found: ID does not 
exist" containerID="dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a" Nov 25 11:09:36 crc kubenswrapper[4702]: I1125 11:09:36.966362 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a"} err="failed to get container status \"dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a\": rpc error: code = NotFound desc = could not find container \"dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a\": container with ID starting with dcc3118dfa2ccff7facdbdc78d072f9809f05bd53be5e34310ce2ff2c492a35a not found: ID does not exist" Nov 25 11:09:37 crc kubenswrapper[4702]: I1125 11:09:37.008005 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "8ab5674b-fca7-4c29-a7cf-5f11285f1235" (UID: "8ab5674b-fca7-4c29-a7cf-5f11285f1235"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:09:37 crc kubenswrapper[4702]: I1125 11:09:37.056649 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 11:09:37 crc kubenswrapper[4702]: I1125 11:09:37.056684 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/8ab5674b-fca7-4c29-a7cf-5f11285f1235-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 11:09:37 crc kubenswrapper[4702]: I1125 11:09:37.056697 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6thd7\" (UniqueName: \"kubernetes.io/projected/8ab5674b-fca7-4c29-a7cf-5f11285f1235-kube-api-access-6thd7\") on node \"crc\" DevicePath \"\"" Nov 25 11:09:37 crc kubenswrapper[4702]: I1125 11:09:37.257376 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-sxkc9"] Nov 25 11:09:37 crc kubenswrapper[4702]: I1125 11:09:37.262260 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-sxkc9"] Nov 25 11:09:37 crc kubenswrapper[4702]: I1125 11:09:37.408663 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" path="/var/lib/kubelet/pods/8ab5674b-fca7-4c29-a7cf-5f11285f1235/volumes" Nov 25 11:09:43 crc kubenswrapper[4702]: I1125 11:09:43.590824 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 11:09:43 crc kubenswrapper[4702]: I1125 11:09:43.592322 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 11:09:52 crc kubenswrapper[4702]: I1125 11:09:52.784587 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-utilities/0.log" Nov 25 11:09:52 crc kubenswrapper[4702]: 
I1125 11:09:52.954972 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-utilities/0.log" Nov 25 11:09:52 crc kubenswrapper[4702]: I1125 11:09:52.957020 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-content/0.log" Nov 25 11:09:52 crc kubenswrapper[4702]: I1125 11:09:52.977603 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-content/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.137066 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-utilities/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.167461 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-content/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.330385 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-utilities/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.436038 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/registry-server/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.522460 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-content/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.542512 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-utilities/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.558132 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-content/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.804704 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-content/0.log" Nov 25 11:09:53 crc kubenswrapper[4702]: I1125 11:09:53.884451 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-utilities/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.032725 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/util/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.194728 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/util/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.210323 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/registry-server/0.log" Nov 25 
11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.240957 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/pull/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.264676 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/pull/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.424864 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/extract/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.456361 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/pull/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.495785 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/util/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.612499 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7mlcv_71f4b5c7-e444-4858-aa1d-4c80e32a7e96/marketplace-operator/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.706103 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-utilities/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.867773 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-utilities/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.873692 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-content/0.log" Nov 25 11:09:54 crc kubenswrapper[4702]: I1125 11:09:54.912720 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-content/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.047536 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-content/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.076030 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-utilities/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.154122 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/registry-server/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.244585 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-utilities/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.439047 4702 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-utilities/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.444393 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-content/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.446125 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-content/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.607038 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-utilities/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.639544 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-content/0.log" Nov 25 11:09:55 crc kubenswrapper[4702]: I1125 11:09:55.873873 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/registry-server/0.log" Nov 25 11:10:13 crc kubenswrapper[4702]: I1125 11:10:13.591015 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 11:10:13 crc kubenswrapper[4702]: I1125 11:10:13.591584 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 11:10:13 crc kubenswrapper[4702]: I1125 11:10:13.591629 4702 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" Nov 25 11:10:13 crc kubenswrapper[4702]: I1125 11:10:13.592160 4702 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0"} pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Nov 25 11:10:13 crc kubenswrapper[4702]: I1125 11:10:13.592202 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" containerID="cri-o://9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" gracePeriod=600 Nov 25 11:10:13 crc kubenswrapper[4702]: E1125 11:10:13.724115 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" 
podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:10:14 crc kubenswrapper[4702]: I1125 11:10:14.119648 4702 generic.go:334] "Generic (PLEG): container finished" podID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" exitCode=0 Nov 25 11:10:14 crc kubenswrapper[4702]: I1125 11:10:14.119698 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerDied","Data":"9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0"} Nov 25 11:10:14 crc kubenswrapper[4702]: I1125 11:10:14.119761 4702 scope.go:117] "RemoveContainer" containerID="2afa2cc519d18597e6cb179d37ccff1596b73bd4c5e742111c8ccb1dfcf95693" Nov 25 11:10:14 crc kubenswrapper[4702]: I1125 11:10:14.123796 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:10:14 crc kubenswrapper[4702]: E1125 11:10:14.125244 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:10:28 crc kubenswrapper[4702]: I1125 11:10:28.402273 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:10:28 crc kubenswrapper[4702]: E1125 11:10:28.403094 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:10:39 crc kubenswrapper[4702]: I1125 11:10:39.406089 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:10:39 crc kubenswrapper[4702]: E1125 11:10:39.406628 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:10:50 crc kubenswrapper[4702]: I1125 11:10:50.403793 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:10:50 crc kubenswrapper[4702]: E1125 11:10:50.404545 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:11:01 crc kubenswrapper[4702]: I1125 
11:11:01.413681 4702 generic.go:334] "Generic (PLEG): container finished" podID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerID="227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889" exitCode=0 Nov 25 11:11:01 crc kubenswrapper[4702]: I1125 11:11:01.413777 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-98s8x/must-gather-fhkjq" event={"ID":"f06992a6-c074-4f7c-a6db-ea83b8c806dc","Type":"ContainerDied","Data":"227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889"} Nov 25 11:11:01 crc kubenswrapper[4702]: I1125 11:11:01.414643 4702 scope.go:117] "RemoveContainer" containerID="227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889" Nov 25 11:11:01 crc kubenswrapper[4702]: I1125 11:11:01.726085 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-98s8x_must-gather-fhkjq_f06992a6-c074-4f7c-a6db-ea83b8c806dc/gather/0.log" Nov 25 11:11:05 crc kubenswrapper[4702]: I1125 11:11:05.401693 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:11:05 crc kubenswrapper[4702]: E1125 11:11:05.402309 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:11:08 crc kubenswrapper[4702]: I1125 11:11:08.422533 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-98s8x/must-gather-fhkjq"] Nov 25 11:11:08 crc kubenswrapper[4702]: I1125 11:11:08.423439 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-98s8x/must-gather-fhkjq" podUID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerName="copy" containerID="cri-o://289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff" gracePeriod=2 Nov 25 11:11:08 crc kubenswrapper[4702]: I1125 11:11:08.430658 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-98s8x/must-gather-fhkjq"] Nov 25 11:11:08 crc kubenswrapper[4702]: I1125 11:11:08.832250 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-98s8x_must-gather-fhkjq_f06992a6-c074-4f7c-a6db-ea83b8c806dc/copy/0.log" Nov 25 11:11:08 crc kubenswrapper[4702]: I1125 11:11:08.833752 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:11:08 crc kubenswrapper[4702]: I1125 11:11:08.944020 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f06992a6-c074-4f7c-a6db-ea83b8c806dc-must-gather-output\") pod \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\" (UID: \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\") " Nov 25 11:11:08 crc kubenswrapper[4702]: I1125 11:11:08.944281 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5cn65\" (UniqueName: \"kubernetes.io/projected/f06992a6-c074-4f7c-a6db-ea83b8c806dc-kube-api-access-5cn65\") pod \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\" (UID: \"f06992a6-c074-4f7c-a6db-ea83b8c806dc\") " Nov 25 11:11:08 crc kubenswrapper[4702]: I1125 11:11:08.957592 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f06992a6-c074-4f7c-a6db-ea83b8c806dc-kube-api-access-5cn65" (OuterVolumeSpecName: "kube-api-access-5cn65") pod "f06992a6-c074-4f7c-a6db-ea83b8c806dc" (UID: "f06992a6-c074-4f7c-a6db-ea83b8c806dc"). InnerVolumeSpecName "kube-api-access-5cn65". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.020966 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/f06992a6-c074-4f7c-a6db-ea83b8c806dc-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "f06992a6-c074-4f7c-a6db-ea83b8c806dc" (UID: "f06992a6-c074-4f7c-a6db-ea83b8c806dc"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.046227 4702 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/f06992a6-c074-4f7c-a6db-ea83b8c806dc-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.046471 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5cn65\" (UniqueName: \"kubernetes.io/projected/f06992a6-c074-4f7c-a6db-ea83b8c806dc-kube-api-access-5cn65\") on node \"crc\" DevicePath \"\"" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.413374 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" path="/var/lib/kubelet/pods/f06992a6-c074-4f7c-a6db-ea83b8c806dc/volumes" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.462246 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-98s8x_must-gather-fhkjq_f06992a6-c074-4f7c-a6db-ea83b8c806dc/copy/0.log" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.463118 4702 generic.go:334] "Generic (PLEG): container finished" podID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerID="289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff" exitCode=143 Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.463169 4702 scope.go:117] "RemoveContainer" containerID="289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.463327 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-98s8x/must-gather-fhkjq" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.484086 4702 scope.go:117] "RemoveContainer" containerID="227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.523680 4702 scope.go:117] "RemoveContainer" containerID="289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff" Nov 25 11:11:09 crc kubenswrapper[4702]: E1125 11:11:09.524992 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff\": container with ID starting with 289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff not found: ID does not exist" containerID="289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.525041 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff"} err="failed to get container status \"289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff\": rpc error: code = NotFound desc = could not find container \"289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff\": container with ID starting with 289013f13dbd3da0d26f39f8319f0bf796e836d9236adaf8a4bca5cad83171ff not found: ID does not exist" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.525087 4702 scope.go:117] "RemoveContainer" containerID="227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889" Nov 25 11:11:09 crc kubenswrapper[4702]: E1125 11:11:09.525616 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889\": container with ID starting with 227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889 not found: ID does not exist" containerID="227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889" Nov 25 11:11:09 crc kubenswrapper[4702]: I1125 11:11:09.525686 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889"} err="failed to get container status \"227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889\": rpc error: code = NotFound desc = could not find container \"227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889\": container with ID starting with 227a6559a53feb2e89c8333f50bfac680864f50e72145b413a431a181ba26889 not found: ID does not exist" Nov 25 11:11:19 crc kubenswrapper[4702]: I1125 11:11:19.401893 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:11:19 crc kubenswrapper[4702]: E1125 11:11:19.402695 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:11:32 crc kubenswrapper[4702]: I1125 11:11:32.402672 4702 scope.go:117] "RemoveContainer" 
containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:11:32 crc kubenswrapper[4702]: E1125 11:11:32.403475 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:11:43 crc kubenswrapper[4702]: I1125 11:11:43.404940 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:11:43 crc kubenswrapper[4702]: E1125 11:11:43.407153 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:11:57 crc kubenswrapper[4702]: I1125 11:11:57.402296 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:11:57 crc kubenswrapper[4702]: E1125 11:11:57.402992 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:12:12 crc kubenswrapper[4702]: I1125 11:12:12.402298 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:12:12 crc kubenswrapper[4702]: E1125 11:12:12.403631 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:12:23 crc kubenswrapper[4702]: I1125 11:12:23.406262 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:12:23 crc kubenswrapper[4702]: E1125 11:12:23.407243 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:12:37 crc kubenswrapper[4702]: I1125 11:12:37.402406 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:12:37 crc kubenswrapper[4702]: E1125 11:12:37.403256 4702 pod_workers.go:1301] "Error syncing pod, 
skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:12:51 crc kubenswrapper[4702]: I1125 11:12:51.402665 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:12:51 crc kubenswrapper[4702]: E1125 11:12:51.403545 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:13:06 crc kubenswrapper[4702]: I1125 11:13:06.402753 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:13:06 crc kubenswrapper[4702]: E1125 11:13:06.403459 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:13:18 crc kubenswrapper[4702]: I1125 11:13:18.402060 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:13:18 crc kubenswrapper[4702]: E1125 11:13:18.402886 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.302134 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-ll6hq/must-gather-qsmsb"] Nov 25 11:13:23 crc kubenswrapper[4702]: E1125 11:13:23.302746 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerName="extract-utilities" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.302763 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerName="extract-utilities" Nov 25 11:13:23 crc kubenswrapper[4702]: E1125 11:13:23.302778 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerName="extract-content" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.302787 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerName="extract-content" Nov 25 11:13:23 crc kubenswrapper[4702]: E1125 11:13:23.302799 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" 
containerName="registry-server" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.302807 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerName="registry-server" Nov 25 11:13:23 crc kubenswrapper[4702]: E1125 11:13:23.302832 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerName="gather" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.302840 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerName="gather" Nov 25 11:13:23 crc kubenswrapper[4702]: E1125 11:13:23.302853 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerName="copy" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.302860 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerName="copy" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.303007 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ab5674b-fca7-4c29-a7cf-5f11285f1235" containerName="registry-server" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.303026 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerName="gather" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.303036 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="f06992a6-c074-4f7c-a6db-ea83b8c806dc" containerName="copy" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.303796 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.308390 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-ll6hq"/"default-dockercfg-ftgzp" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.308681 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-ll6hq"/"kube-root-ca.crt" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.314643 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-ll6hq"/"openshift-service-ca.crt" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.316734 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmn6h\" (UniqueName: \"kubernetes.io/projected/193318b0-f021-456c-9695-cd20a12a0997-kube-api-access-wmn6h\") pod \"must-gather-qsmsb\" (UID: \"193318b0-f021-456c-9695-cd20a12a0997\") " pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.316783 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/193318b0-f021-456c-9695-cd20a12a0997-must-gather-output\") pod \"must-gather-qsmsb\" (UID: \"193318b0-f021-456c-9695-cd20a12a0997\") " pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.327301 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-ll6hq/must-gather-qsmsb"] Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.417935 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmn6h\" (UniqueName: 
\"kubernetes.io/projected/193318b0-f021-456c-9695-cd20a12a0997-kube-api-access-wmn6h\") pod \"must-gather-qsmsb\" (UID: \"193318b0-f021-456c-9695-cd20a12a0997\") " pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.418019 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/193318b0-f021-456c-9695-cd20a12a0997-must-gather-output\") pod \"must-gather-qsmsb\" (UID: \"193318b0-f021-456c-9695-cd20a12a0997\") " pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.419264 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/193318b0-f021-456c-9695-cd20a12a0997-must-gather-output\") pod \"must-gather-qsmsb\" (UID: \"193318b0-f021-456c-9695-cd20a12a0997\") " pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.447789 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmn6h\" (UniqueName: \"kubernetes.io/projected/193318b0-f021-456c-9695-cd20a12a0997-kube-api-access-wmn6h\") pod \"must-gather-qsmsb\" (UID: \"193318b0-f021-456c-9695-cd20a12a0997\") " pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.619445 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:13:23 crc kubenswrapper[4702]: I1125 11:13:23.995228 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-ll6hq/must-gather-qsmsb"] Nov 25 11:13:24 crc kubenswrapper[4702]: I1125 11:13:24.228109 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" event={"ID":"193318b0-f021-456c-9695-cd20a12a0997","Type":"ContainerStarted","Data":"082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0"} Nov 25 11:13:24 crc kubenswrapper[4702]: I1125 11:13:24.228465 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" event={"ID":"193318b0-f021-456c-9695-cd20a12a0997","Type":"ContainerStarted","Data":"7b4d2476362e1ac77ec2ab8895a0f894742c10e51f586bacf04dbf58ed17043d"} Nov 25 11:13:25 crc kubenswrapper[4702]: I1125 11:13:25.235820 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" event={"ID":"193318b0-f021-456c-9695-cd20a12a0997","Type":"ContainerStarted","Data":"e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9"} Nov 25 11:13:25 crc kubenswrapper[4702]: I1125 11:13:25.257267 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" podStartSLOduration=2.257248434 podStartE2EDuration="2.257248434s" podCreationTimestamp="2025-11-25 11:13:23 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2025-11-25 11:13:25.254515956 +0000 UTC m=+2502.621111655" watchObservedRunningTime="2025-11-25 11:13:25.257248434 +0000 UTC m=+2502.623844123" Nov 25 11:13:32 crc kubenswrapper[4702]: I1125 11:13:32.403151 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:13:32 crc kubenswrapper[4702]: E1125 11:13:32.404357 4702 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.129696 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-5s785"] Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.132618 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.140494 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5s785"] Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.311289 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lcpc8\" (UniqueName: \"kubernetes.io/projected/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-kube-api-access-lcpc8\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.311570 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-catalog-content\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.311669 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-utilities\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.413174 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lcpc8\" (UniqueName: \"kubernetes.io/projected/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-kube-api-access-lcpc8\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.413259 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-catalog-content\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.413305 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-utilities\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.413847 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: 
\"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-utilities\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.413990 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-catalog-content\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.437998 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lcpc8\" (UniqueName: \"kubernetes.io/projected/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-kube-api-access-lcpc8\") pod \"redhat-operators-5s785\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.495965 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:39 crc kubenswrapper[4702]: I1125 11:13:39.752695 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-5s785"] Nov 25 11:13:40 crc kubenswrapper[4702]: I1125 11:13:40.322265 4702 generic.go:334] "Generic (PLEG): container finished" podID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerID="97a464e88a497f661dde3df04e832074608938edef5b9e8bbf9aa18ed44f5893" exitCode=0 Nov 25 11:13:40 crc kubenswrapper[4702]: I1125 11:13:40.322312 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5s785" event={"ID":"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2","Type":"ContainerDied","Data":"97a464e88a497f661dde3df04e832074608938edef5b9e8bbf9aa18ed44f5893"} Nov 25 11:13:40 crc kubenswrapper[4702]: I1125 11:13:40.322349 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5s785" event={"ID":"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2","Type":"ContainerStarted","Data":"ff7f2cee940d8ba205bc6be7a38b7c4150938c3019124a11ce16cc7d522c0a41"} Nov 25 11:13:40 crc kubenswrapper[4702]: I1125 11:13:40.324435 4702 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Nov 25 11:13:41 crc kubenswrapper[4702]: I1125 11:13:41.330077 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5s785" event={"ID":"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2","Type":"ContainerStarted","Data":"f196b3ecd108d151fcf4778227731250b9e930502bd7a854a8e8a5765b2d3f8d"} Nov 25 11:13:42 crc kubenswrapper[4702]: I1125 11:13:42.336031 4702 generic.go:334] "Generic (PLEG): container finished" podID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerID="f196b3ecd108d151fcf4778227731250b9e930502bd7a854a8e8a5765b2d3f8d" exitCode=0 Nov 25 11:13:42 crc kubenswrapper[4702]: I1125 11:13:42.336078 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5s785" event={"ID":"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2","Type":"ContainerDied","Data":"f196b3ecd108d151fcf4778227731250b9e930502bd7a854a8e8a5765b2d3f8d"} Nov 25 11:13:43 crc kubenswrapper[4702]: I1125 11:13:43.344132 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5s785" 
event={"ID":"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2","Type":"ContainerStarted","Data":"a4d1c0e4b836e0c57ac8aa8ec0bade90649fc53ff35405304a8fc95f019d097a"} Nov 25 11:13:43 crc kubenswrapper[4702]: I1125 11:13:43.367150 4702 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-5s785" podStartSLOduration=1.985312878 podStartE2EDuration="4.367129105s" podCreationTimestamp="2025-11-25 11:13:39 +0000 UTC" firstStartedPulling="2025-11-25 11:13:40.324169318 +0000 UTC m=+2517.690765007" lastFinishedPulling="2025-11-25 11:13:42.705985545 +0000 UTC m=+2520.072581234" observedRunningTime="2025-11-25 11:13:43.362266485 +0000 UTC m=+2520.728862194" watchObservedRunningTime="2025-11-25 11:13:43.367129105 +0000 UTC m=+2520.733724814" Nov 25 11:13:45 crc kubenswrapper[4702]: I1125 11:13:45.402095 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:13:45 crc kubenswrapper[4702]: E1125 11:13:45.403963 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:13:49 crc kubenswrapper[4702]: I1125 11:13:49.496531 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:49 crc kubenswrapper[4702]: I1125 11:13:49.497189 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:49 crc kubenswrapper[4702]: I1125 11:13:49.535355 4702 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:50 crc kubenswrapper[4702]: I1125 11:13:50.438426 4702 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:50 crc kubenswrapper[4702]: I1125 11:13:50.718823 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5s785"] Nov 25 11:13:52 crc kubenswrapper[4702]: I1125 11:13:52.402058 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-5s785" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerName="registry-server" containerID="cri-o://a4d1c0e4b836e0c57ac8aa8ec0bade90649fc53ff35405304a8fc95f019d097a" gracePeriod=2 Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.427376 4702 generic.go:334] "Generic (PLEG): container finished" podID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerID="a4d1c0e4b836e0c57ac8aa8ec0bade90649fc53ff35405304a8fc95f019d097a" exitCode=0 Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.427429 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5s785" event={"ID":"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2","Type":"ContainerDied","Data":"a4d1c0e4b836e0c57ac8aa8ec0bade90649fc53ff35405304a8fc95f019d097a"} Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.658569 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.802457 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-utilities\") pod \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.802551 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lcpc8\" (UniqueName: \"kubernetes.io/projected/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-kube-api-access-lcpc8\") pod \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.802601 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-catalog-content\") pod \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\" (UID: \"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2\") " Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.803932 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-utilities" (OuterVolumeSpecName: "utilities") pod "b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" (UID: "b8a2c4a3-dad6-40a4-81a1-93068b0b24f2"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.809027 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-kube-api-access-lcpc8" (OuterVolumeSpecName: "kube-api-access-lcpc8") pod "b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" (UID: "b8a2c4a3-dad6-40a4-81a1-93068b0b24f2"). InnerVolumeSpecName "kube-api-access-lcpc8". PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.900422 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" (UID: "b8a2c4a3-dad6-40a4-81a1-93068b0b24f2"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.903918 4702 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-catalog-content\") on node \"crc\" DevicePath \"\"" Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.904062 4702 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-utilities\") on node \"crc\" DevicePath \"\"" Nov 25 11:13:55 crc kubenswrapper[4702]: I1125 11:13:55.904119 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lcpc8\" (UniqueName: \"kubernetes.io/projected/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2-kube-api-access-lcpc8\") on node \"crc\" DevicePath \"\"" Nov 25 11:13:56 crc kubenswrapper[4702]: I1125 11:13:56.433928 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-5s785" event={"ID":"b8a2c4a3-dad6-40a4-81a1-93068b0b24f2","Type":"ContainerDied","Data":"ff7f2cee940d8ba205bc6be7a38b7c4150938c3019124a11ce16cc7d522c0a41"} Nov 25 11:13:56 crc kubenswrapper[4702]: I1125 11:13:56.433973 4702 scope.go:117] "RemoveContainer" containerID="a4d1c0e4b836e0c57ac8aa8ec0bade90649fc53ff35405304a8fc95f019d097a" Nov 25 11:13:56 crc kubenswrapper[4702]: I1125 11:13:56.434085 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-5s785" Nov 25 11:13:56 crc kubenswrapper[4702]: I1125 11:13:56.464194 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-5s785"] Nov 25 11:13:56 crc kubenswrapper[4702]: I1125 11:13:56.469050 4702 scope.go:117] "RemoveContainer" containerID="f196b3ecd108d151fcf4778227731250b9e930502bd7a854a8e8a5765b2d3f8d" Nov 25 11:13:56 crc kubenswrapper[4702]: I1125 11:13:56.472714 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-5s785"] Nov 25 11:13:56 crc kubenswrapper[4702]: I1125 11:13:56.487724 4702 scope.go:117] "RemoveContainer" containerID="97a464e88a497f661dde3df04e832074608938edef5b9e8bbf9aa18ed44f5893" Nov 25 11:13:57 crc kubenswrapper[4702]: I1125 11:13:57.402178 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:13:57 crc kubenswrapper[4702]: E1125 11:13:57.402388 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:13:57 crc kubenswrapper[4702]: I1125 11:13:57.409875 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" path="/var/lib/kubelet/pods/b8a2c4a3-dad6-40a4-81a1-93068b0b24f2/volumes" Nov 25 11:14:03 crc kubenswrapper[4702]: I1125 11:14:03.637670 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-4jnl2_853aac53-23c5-4f78-a291-bc82dff9e338/control-plane-machine-set-operator/0.log" Nov 25 11:14:03 crc kubenswrapper[4702]: I1125 11:14:03.822559 4702 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7s7k2_af02ba00-7c73-4bc3-a341-5dac59a49e12/machine-api-operator/0.log" Nov 25 11:14:03 crc kubenswrapper[4702]: I1125 11:14:03.858125 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-7s7k2_af02ba00-7c73-4bc3-a341-5dac59a49e12/kube-rbac-proxy/0.log" Nov 25 11:14:10 crc kubenswrapper[4702]: I1125 11:14:10.402250 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:14:10 crc kubenswrapper[4702]: E1125 11:14:10.403023 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:14:19 crc kubenswrapper[4702]: I1125 11:14:19.580873 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-2ck8z_973ff76e-6e14-4f6e-a0cb-fc3e5af9b694/kube-rbac-proxy/0.log" Nov 25 11:14:19 crc kubenswrapper[4702]: I1125 11:14:19.623186 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6c7b4b5f48-2ck8z_973ff76e-6e14-4f6e-a0cb-fc3e5af9b694/controller/0.log" Nov 25 11:14:19 crc kubenswrapper[4702]: I1125 11:14:19.877342 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-frr-files/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.015377 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-reloader/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.027536 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-frr-files/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.041952 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-reloader/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.076852 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-metrics/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.250968 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-frr-files/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.256935 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-reloader/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.257924 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-metrics/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.289721 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-metrics/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.493320 4702 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-reloader/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.496864 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-frr-files/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.499816 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/controller/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.516730 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/cp-metrics/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.677260 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/kube-rbac-proxy/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.699986 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/frr-metrics/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.738735 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/kube-rbac-proxy-frr/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.879629 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/reloader/0.log" Nov 25 11:14:20 crc kubenswrapper[4702]: I1125 11:14:20.923109 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-6998585d5-z5pzw_eea53047-9f4a-400c-8db5-bcb0a8c08967/frr-k8s-webhook-server/0.log" Nov 25 11:14:21 crc kubenswrapper[4702]: I1125 11:14:21.056711 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-d9d597dc8-q6ll4_a6507463-185a-40db-9736-bfcc4f0928e9/manager/0.log" Nov 25 11:14:21 crc kubenswrapper[4702]: I1125 11:14:21.188407 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-rsjl6_776790d4-3b26-4355-b007-928895d8abda/frr/0.log" Nov 25 11:14:21 crc kubenswrapper[4702]: I1125 11:14:21.226869 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-5c9db6d78-h6mmv_f587e69f-3aaf-403e-a060-bf4542e19ec8/webhook-server/0.log" Nov 25 11:14:21 crc kubenswrapper[4702]: I1125 11:14:21.336438 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-m5tjs_92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf/kube-rbac-proxy/0.log" Nov 25 11:14:21 crc kubenswrapper[4702]: I1125 11:14:21.436482 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-m5tjs_92bfef0f-1ea1-4d57-bd99-2f1b573d5ddf/speaker/0.log" Nov 25 11:14:22 crc kubenswrapper[4702]: I1125 11:14:22.402668 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:14:22 crc kubenswrapper[4702]: E1125 11:14:22.402940 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" 
pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:14:36 crc kubenswrapper[4702]: I1125 11:14:36.401747 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:14:36 crc kubenswrapper[4702]: E1125 11:14:36.402492 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:14:43 crc kubenswrapper[4702]: I1125 11:14:43.488009 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-utilities/0.log" Nov 25 11:14:43 crc kubenswrapper[4702]: I1125 11:14:43.634841 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-content/0.log" Nov 25 11:14:43 crc kubenswrapper[4702]: I1125 11:14:43.659128 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-content/0.log" Nov 25 11:14:43 crc kubenswrapper[4702]: I1125 11:14:43.673871 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-utilities/0.log" Nov 25 11:14:43 crc kubenswrapper[4702]: I1125 11:14:43.838871 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-content/0.log" Nov 25 11:14:43 crc kubenswrapper[4702]: I1125 11:14:43.844594 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/extract-utilities/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.061542 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-utilities/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.151647 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-8pk8v_0aeef268-5082-47e1-8bc5-7e66f64509e2/registry-server/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.227760 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-content/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.255765 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-content/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.255785 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-utilities/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.427023 4702 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-content/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.443553 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/extract-utilities/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.640557 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/util/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.862677 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/pull/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.883284 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-nsh8t_efcb9706-ad0f-487f-8aae-e3ebd4b88d7d/registry-server/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.912114 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/pull/0.log" Nov 25 11:14:44 crc kubenswrapper[4702]: I1125 11:14:44.926493 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/util/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.208994 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/extract/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.215661 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/pull/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.267873 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_e8527aae5664f20f24bf3bbb3fd2981ba838928a8a47ce599ee258e4c6qgdqq_a065282b-3c41-4b07-93b3-e29e2502e89a/util/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.396476 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-7mlcv_71f4b5c7-e444-4858-aa1d-4c80e32a7e96/marketplace-operator/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.430606 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-utilities/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.601418 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-utilities/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.626596 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-content/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.649859 4702 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-content/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.820627 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-utilities/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.825826 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/extract-content/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.875392 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-gfmbn_b8b2cad5-dc20-4654-a6e7-4326383a6dda/registry-server/0.log" Nov 25 11:14:45 crc kubenswrapper[4702]: I1125 11:14:45.992834 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-utilities/0.log" Nov 25 11:14:46 crc kubenswrapper[4702]: I1125 11:14:46.186069 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-content/0.log" Nov 25 11:14:46 crc kubenswrapper[4702]: I1125 11:14:46.201515 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-utilities/0.log" Nov 25 11:14:46 crc kubenswrapper[4702]: I1125 11:14:46.208990 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-content/0.log" Nov 25 11:14:46 crc kubenswrapper[4702]: I1125 11:14:46.354341 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-content/0.log" Nov 25 11:14:46 crc kubenswrapper[4702]: I1125 11:14:46.405894 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/extract-utilities/0.log" Nov 25 11:14:46 crc kubenswrapper[4702]: I1125 11:14:46.713203 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-wktsv_22d7c844-b7a4-47a3-893d-16bc54bdeb76/registry-server/0.log" Nov 25 11:14:47 crc kubenswrapper[4702]: I1125 11:14:47.402446 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:14:47 crc kubenswrapper[4702]: E1125 11:14:47.402855 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:14:49 crc kubenswrapper[4702]: I1125 11:14:49.958901 4702 scope.go:117] "RemoveContainer" containerID="a0bfd12615832aa33c4776ec3bf5d837c15a030ff59cee20529568ed10f03617" Nov 25 11:14:59 crc kubenswrapper[4702]: I1125 11:14:59.401896 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:14:59 crc kubenswrapper[4702]: E1125 11:14:59.403389 
4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.140180 4702 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5"] Nov 25 11:15:00 crc kubenswrapper[4702]: E1125 11:15:00.140485 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerName="extract-utilities" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.140506 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerName="extract-utilities" Nov 25 11:15:00 crc kubenswrapper[4702]: E1125 11:15:00.140520 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerName="registry-server" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.140528 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerName="registry-server" Nov 25 11:15:00 crc kubenswrapper[4702]: E1125 11:15:00.140550 4702 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerName="extract-content" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.140561 4702 state_mem.go:107] "Deleted CPUSet assignment" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerName="extract-content" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.140696 4702 memory_manager.go:354] "RemoveStaleState removing state" podUID="b8a2c4a3-dad6-40a4-81a1-93068b0b24f2" containerName="registry-server" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.141174 4702 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.143694 4702 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.144129 4702 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.154339 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5"] Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.171749 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-config-volume\") pod \"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.171811 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-secret-volume\") pod \"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.171834 4702 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dlxjw\" (UniqueName: \"kubernetes.io/projected/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-kube-api-access-dlxjw\") pod \"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.272859 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-secret-volume\") pod \"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.272950 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dlxjw\" (UniqueName: \"kubernetes.io/projected/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-kube-api-access-dlxjw\") pod \"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.273108 4702 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-config-volume\") pod \"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.274232 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-config-volume\") pod 
\"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.285676 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-secret-volume\") pod \"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.307844 4702 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dlxjw\" (UniqueName: \"kubernetes.io/projected/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-kube-api-access-dlxjw\") pod \"collect-profiles-29401155-6tss5\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.466195 4702 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.709197 4702 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5"] Nov 25 11:15:00 crc kubenswrapper[4702]: W1125 11:15:00.725258 4702 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb4d0859_ec6a_4dde_bd3e_e38934aedb03.slice/crio-2dd015371a7479fbb077fe399ae2191f4a7f716c515ec3a413263a0f9d1159e5 WatchSource:0}: Error finding container 2dd015371a7479fbb077fe399ae2191f4a7f716c515ec3a413263a0f9d1159e5: Status 404 returned error can't find the container with id 2dd015371a7479fbb077fe399ae2191f4a7f716c515ec3a413263a0f9d1159e5 Nov 25 11:15:00 crc kubenswrapper[4702]: I1125 11:15:00.815004 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" event={"ID":"cb4d0859-ec6a-4dde-bd3e-e38934aedb03","Type":"ContainerStarted","Data":"2dd015371a7479fbb077fe399ae2191f4a7f716c515ec3a413263a0f9d1159e5"} Nov 25 11:15:01 crc kubenswrapper[4702]: I1125 11:15:01.823277 4702 generic.go:334] "Generic (PLEG): container finished" podID="cb4d0859-ec6a-4dde-bd3e-e38934aedb03" containerID="cf4c8f5018786077d3d4e610eea303c4c9ba5a73bd783ba571db94644f4ee769" exitCode=0 Nov 25 11:15:01 crc kubenswrapper[4702]: I1125 11:15:01.823358 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" event={"ID":"cb4d0859-ec6a-4dde-bd3e-e38934aedb03","Type":"ContainerDied","Data":"cf4c8f5018786077d3d4e610eea303c4c9ba5a73bd783ba571db94644f4ee769"} Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.095378 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.211617 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dlxjw\" (UniqueName: \"kubernetes.io/projected/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-kube-api-access-dlxjw\") pod \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.211790 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-secret-volume\") pod \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.211850 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-config-volume\") pod \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\" (UID: \"cb4d0859-ec6a-4dde-bd3e-e38934aedb03\") " Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.212621 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-config-volume" (OuterVolumeSpecName: "config-volume") pod "cb4d0859-ec6a-4dde-bd3e-e38934aedb03" (UID: "cb4d0859-ec6a-4dde-bd3e-e38934aedb03"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.218799 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "cb4d0859-ec6a-4dde-bd3e-e38934aedb03" (UID: "cb4d0859-ec6a-4dde-bd3e-e38934aedb03"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.218895 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-kube-api-access-dlxjw" (OuterVolumeSpecName: "kube-api-access-dlxjw") pod "cb4d0859-ec6a-4dde-bd3e-e38934aedb03" (UID: "cb4d0859-ec6a-4dde-bd3e-e38934aedb03"). InnerVolumeSpecName "kube-api-access-dlxjw". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.313622 4702 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-secret-volume\") on node \"crc\" DevicePath \"\"" Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.313672 4702 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-config-volume\") on node \"crc\" DevicePath \"\"" Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.313686 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dlxjw\" (UniqueName: \"kubernetes.io/projected/cb4d0859-ec6a-4dde-bd3e-e38934aedb03-kube-api-access-dlxjw\") on node \"crc\" DevicePath \"\"" Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.835879 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" event={"ID":"cb4d0859-ec6a-4dde-bd3e-e38934aedb03","Type":"ContainerDied","Data":"2dd015371a7479fbb077fe399ae2191f4a7f716c515ec3a413263a0f9d1159e5"} Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.835954 4702 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="2dd015371a7479fbb077fe399ae2191f4a7f716c515ec3a413263a0f9d1159e5" Nov 25 11:15:03 crc kubenswrapper[4702]: I1125 11:15:03.836023 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29401155-6tss5" Nov 25 11:15:04 crc kubenswrapper[4702]: I1125 11:15:04.168977 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld"] Nov 25 11:15:04 crc kubenswrapper[4702]: I1125 11:15:04.179169 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29401110-dcvld"] Nov 25 11:15:05 crc kubenswrapper[4702]: I1125 11:15:05.409392 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="32c118a6-a92d-47fb-8169-bccbb5e51072" path="/var/lib/kubelet/pods/32c118a6-a92d-47fb-8169-bccbb5e51072/volumes" Nov 25 11:15:10 crc kubenswrapper[4702]: I1125 11:15:10.402416 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:15:10 crc kubenswrapper[4702]: E1125 11:15:10.403135 4702 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-g5m5h_openshift-machine-config-operator(5b72fbd8-190c-44a0-bdf1-ed4523f82cc2)\"" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" Nov 25 11:15:24 crc kubenswrapper[4702]: I1125 11:15:24.402364 4702 scope.go:117] "RemoveContainer" containerID="9958dc1ce1ab004eec11a0858c304f714b97341992e371293443777ef42db3c0" Nov 25 11:15:26 crc kubenswrapper[4702]: I1125 11:15:26.154578 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" event={"ID":"5b72fbd8-190c-44a0-bdf1-ed4523f82cc2","Type":"ContainerStarted","Data":"3cbf1b77de7d5002ed3b07c962044ae22aa9137406e5b54e6f063bed28ebfc8c"} Nov 25 11:15:50 crc kubenswrapper[4702]: I1125 11:15:50.010408 4702 
scope.go:117] "RemoveContainer" containerID="a4b65bfbc71f8464a9d2f7a1942602a7efcba0937961f5aff9d9d41d3faa3436" Nov 25 11:15:54 crc kubenswrapper[4702]: I1125 11:15:54.381011 4702 generic.go:334] "Generic (PLEG): container finished" podID="193318b0-f021-456c-9695-cd20a12a0997" containerID="082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0" exitCode=0 Nov 25 11:15:54 crc kubenswrapper[4702]: I1125 11:15:54.381080 4702 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" event={"ID":"193318b0-f021-456c-9695-cd20a12a0997","Type":"ContainerDied","Data":"082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0"} Nov 25 11:15:54 crc kubenswrapper[4702]: I1125 11:15:54.381749 4702 scope.go:117] "RemoveContainer" containerID="082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0" Nov 25 11:15:54 crc kubenswrapper[4702]: I1125 11:15:54.503505 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-ll6hq_must-gather-qsmsb_193318b0-f021-456c-9695-cd20a12a0997/gather/0.log" Nov 25 11:16:03 crc kubenswrapper[4702]: I1125 11:16:03.513050 4702 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-ll6hq/must-gather-qsmsb"] Nov 25 11:16:03 crc kubenswrapper[4702]: I1125 11:16:03.513837 4702 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" podUID="193318b0-f021-456c-9695-cd20a12a0997" containerName="copy" containerID="cri-o://e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9" gracePeriod=2 Nov 25 11:16:03 crc kubenswrapper[4702]: I1125 11:16:03.517079 4702 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-ll6hq/must-gather-qsmsb"] Nov 25 11:16:03 crc kubenswrapper[4702]: I1125 11:16:03.903569 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-ll6hq_must-gather-qsmsb_193318b0-f021-456c-9695-cd20a12a0997/copy/0.log" Nov 25 11:16:03 crc kubenswrapper[4702]: I1125 11:16:03.904436 4702 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.002074 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/193318b0-f021-456c-9695-cd20a12a0997-must-gather-output\") pod \"193318b0-f021-456c-9695-cd20a12a0997\" (UID: \"193318b0-f021-456c-9695-cd20a12a0997\") " Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.002142 4702 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmn6h\" (UniqueName: \"kubernetes.io/projected/193318b0-f021-456c-9695-cd20a12a0997-kube-api-access-wmn6h\") pod \"193318b0-f021-456c-9695-cd20a12a0997\" (UID: \"193318b0-f021-456c-9695-cd20a12a0997\") " Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.007473 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/193318b0-f021-456c-9695-cd20a12a0997-kube-api-access-wmn6h" (OuterVolumeSpecName: "kube-api-access-wmn6h") pod "193318b0-f021-456c-9695-cd20a12a0997" (UID: "193318b0-f021-456c-9695-cd20a12a0997"). InnerVolumeSpecName "kube-api-access-wmn6h". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.068707 4702 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/193318b0-f021-456c-9695-cd20a12a0997-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "193318b0-f021-456c-9695-cd20a12a0997" (UID: "193318b0-f021-456c-9695-cd20a12a0997"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.103079 4702 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/193318b0-f021-456c-9695-cd20a12a0997-must-gather-output\") on node \"crc\" DevicePath \"\"" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.103122 4702 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmn6h\" (UniqueName: \"kubernetes.io/projected/193318b0-f021-456c-9695-cd20a12a0997-kube-api-access-wmn6h\") on node \"crc\" DevicePath \"\"" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.442778 4702 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-ll6hq_must-gather-qsmsb_193318b0-f021-456c-9695-cd20a12a0997/copy/0.log" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.443823 4702 generic.go:334] "Generic (PLEG): container finished" podID="193318b0-f021-456c-9695-cd20a12a0997" containerID="e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9" exitCode=143 Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.443933 4702 scope.go:117] "RemoveContainer" containerID="e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.444054 4702 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-ll6hq/must-gather-qsmsb" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.462760 4702 scope.go:117] "RemoveContainer" containerID="082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.515322 4702 scope.go:117] "RemoveContainer" containerID="e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9" Nov 25 11:16:04 crc kubenswrapper[4702]: E1125 11:16:04.515933 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9\": container with ID starting with e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9 not found: ID does not exist" containerID="e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.515976 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9"} err="failed to get container status \"e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9\": rpc error: code = NotFound desc = could not find container \"e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9\": container with ID starting with e4d8a3d4da92ac4341b3ac204596c00ce1fd00b96b694f24f51d393d51618cd9 not found: ID does not exist" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.516012 4702 scope.go:117] "RemoveContainer" containerID="082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0" Nov 25 11:16:04 crc kubenswrapper[4702]: E1125 11:16:04.516419 4702 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0\": container with ID starting with 082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0 not found: ID does not exist" containerID="082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0" Nov 25 11:16:04 crc kubenswrapper[4702]: I1125 11:16:04.516445 4702 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0"} err="failed to get container status \"082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0\": rpc error: code = NotFound desc = could not find container \"082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0\": container with ID starting with 082608a67d52e29a62e7a8b3f3ed23e23d7758ab33c0f8238e28873f400da7f0 not found: ID does not exist" Nov 25 11:16:05 crc kubenswrapper[4702]: I1125 11:16:05.409675 4702 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="193318b0-f021-456c-9695-cd20a12a0997" path="/var/lib/kubelet/pods/193318b0-f021-456c-9695-cd20a12a0997/volumes" Nov 25 11:17:43 crc kubenswrapper[4702]: I1125 11:17:43.591393 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 11:17:43 crc kubenswrapper[4702]: I1125 11:17:43.591997 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" 
podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Nov 25 11:18:13 crc kubenswrapper[4702]: I1125 11:18:13.590976 4702 patch_prober.go:28] interesting pod/machine-config-daemon-g5m5h container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Nov 25 11:18:13 crc kubenswrapper[4702]: I1125 11:18:13.592032 4702 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-g5m5h" podUID="5b72fbd8-190c-44a0-bdf1-ed4523f82cc2" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" var/home/core/zuul-output/logs/crc-cloud-workdir-crc-all-logs.tar.gz0000644000175000000000000000005515111310222024431 0ustar coreroot‹íÁ  ÷Om7 €7šÞ'(var/home/core/zuul-output/logs/crc-cloud/0000755000175000000000000000000015111310223017347 5ustar corerootvar/home/core/zuul-output/artifacts/0000755000175000017500000000000015111302277016504 5ustar corecorevar/home/core/zuul-output/docs/0000755000175000017500000000000015111302300015437 5ustar corecore